diff --git a/.github/workflows/publish-immutable-actions.yml b/.github/workflows/publish-immutable-actions.yml
new file mode 100644
index 000000000..87c020728
--- /dev/null
+++ b/.github/workflows/publish-immutable-actions.yml
@@ -0,0 +1,20 @@
+name: 'Publish Immutable Action Version'
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+      packages: write
+
+    steps:
+      - name: Checking out
+        uses: actions/checkout@v4
+      - name: Publish
+        id: publish
+        uses: actions/publish-immutable-action@0.0.3
diff --git a/.licenses/npm/undici.dep.yml b/.licenses/npm/undici.dep.yml
index 58844ed3d..cc74a6d2d 100644
--- a/.licenses/npm/undici.dep.yml
+++ b/.licenses/npm/undici.dep.yml
@@ -1,6 +1,6 @@
 ---
 name: undici
-version: 5.28.3
+version: 5.28.4
 type: npm
 summary: An HTTP/1.1 client, written from scratch for Node.js
 homepage: https://undici.nodejs.org
diff --git a/README.md b/README.md
index 86236a495..9843ee215 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@ See [action.yml](action.yml)
 - uses: actions/setup-node@v4
   with:
     # Version Spec of the version to use in SemVer notation.
-    # It also emits such aliases as lts, latest, nightly and canary builds
+    # It also admits such aliases as lts/*, latest, nightly and canary builds
     # Examples: 12.x, 10.15.1, >=10.15.0, lts/Hydrogen, 16-nightly, latest, node
     node-version: ''

diff --git a/__tests__/cache-restore.test.ts b/__tests__/cache-restore.test.ts
index 90153a403..0bbf28126 100644
--- a/__tests__/cache-restore.test.ts
+++ b/__tests__/cache-restore.test.ts
@@ -2,6 +2,7 @@ import * as core from '@actions/core';
 import * as cache from '@actions/cache';
 import * as path from 'path';
 import * as glob from '@actions/glob';
+import osm from 'os';
 import * as utils from '../src/cache-utils';
 import {restoreCache} from '../src/cache-restore';

@@ -12,6 +13,7 @@ describe('cache-restore', () => {
     process.env.RUNNER_OS = 'Linux';
   }
   const platform = process.env.RUNNER_OS;
+  const arch = 'arm64';
   const commonPath = '/some/random/path';
   const npmCachePath = `${commonPath}/npm`;
   const pnpmCachePath = `${commonPath}/pnpm`;
@@ -52,6 +54,7 @@ describe('cache-restore', () => {
   let getCommandOutputSpy: jest.SpyInstance;
   let restoreCacheSpy: jest.SpyInstance;
   let hashFilesSpy: jest.SpyInstance;
+  let archSpy: jest.SpyInstance;

   beforeEach(() => {
     // core
@@ -102,6 +105,10 @@ describe('cache-restore', () => {

     // cache-utils
     getCommandOutputSpy = jest.spyOn(utils, 'getCommandOutput');
+
+    // os
+    archSpy = jest.spyOn(osm, 'arch');
+    archSpy.mockImplementation(() => arch);
   });

   describe('Validate provided package manager', () => {
@@ -135,7 +142,7 @@ describe('cache-restore', () => {
       await restoreCache(packageManager, '');
       expect(hashFilesSpy).toHaveBeenCalled();
       expect(infoSpy).toHaveBeenCalledWith(
-        `Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}`
+        `Cache restored from key: node-cache-${platform}-${arch}-${packageManager}-${fileHash}`
       );
       expect(infoSpy).not.toHaveBeenCalledWith(
         `${packageManager} cache is not found`
diff --git a/__tests__/cache-utils.test.ts b/__tests__/cache-utils.test.ts
index a8c881e57..f14848de0 100644
--- a/__tests__/cache-utils.test.ts
+++ b/__tests__/cache-utils.test.ts
@@ -6,7 +6,7 @@ import {
   PackageManagerInfo,
   isCacheFeatureAvailable,
   supportedPackageManagers,
-  getCommandOutput,
+  isGhes,
   resetProjectDirectoriesMemoized
 } from '../src/cache-utils';
 import fs from 'fs';
@@ -361,3 +361,41 @@ describe('cache-utils', () => {
     );
   });
 });
+
+describe('isGhes', () => {
+  const pristineEnv = process.env;
+
+  beforeEach(() => {
+    jest.resetModules();
+    process.env = {...pristineEnv};
+  });
+
+  afterAll(() => {
+    process.env = pristineEnv;
+  });
+
+  it('returns false when the GITHUB_SERVER_URL environment variable is not defined', () => {
+    delete process.env['GITHUB_SERVER_URL'];
+    expect(isGhes()).toBeFalsy();
+  });
+
+  it('returns false when the GITHUB_SERVER_URL environment variable is set to github.com', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://github.com';
+    expect(isGhes()).toBeFalsy();
+  });
+
+  it('returns false when the GITHUB_SERVER_URL environment variable is set to a GitHub Enterprise Cloud-style URL', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://contoso.ghe.com';
+    expect(isGhes()).toBeFalsy();
+  });
+
+  it('returns false when the GITHUB_SERVER_URL environment variable has a .localhost suffix', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://mock-github.localhost';
+    expect(isGhes()).toBeFalsy();
+  });
+
+  it('returns true when the GITHUB_SERVER_URL environment variable is set to some other URL', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://src.onpremise.fabrikam.com';
+    expect(isGhes()).toBeTruthy();
+  });
+});
diff --git a/__tests__/data/package-lock.json b/__tests__/data/package-lock.json
index 2e3a8e060..e741e5a98 100644
--- a/__tests__/data/package-lock.json
+++ b/__tests__/data/package-lock.json
@@ -1,395 +1,1401 @@
 {
-  "name": "test",
-  "version": "1.0.0",
-  "lockfileVersion": 1,
-  "requires": true,
-  "dependencies": {
-    "accepts": {
-      "version": "1.3.7",
-      "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
-      "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
-      "requires": {
-        "mime-types": "~2.1.24",
-        "negotiator": "0.6.2"
-      }
+  "name": "data",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "name": "data",
+      "license": "ISC",
+      "dependencies": {
+        "accepts": "^1.3.8",
+        "array-flatten": "^3.0.0",
+        "body-parser": "^1.20.3",
+        "bytes": "^3.1.2",
+        "content-disposition": "^0.5.4",
+        "content-type": "^1.0.5",
+        "cookie": "^0.5.0",
+        "cookie-signature": "^1.0.7",
+        "debug": "^4.0.0",
+        "depd": "^2.0.0",
+        "destroy": "^1.1.0",
+        "ee-first": "^1.1.1",
+        "encodeurl": "^2.0.0",
+        "escape-html": "^1.0.3",
+        "etag": "^1.8.1",
+        "express": "^5.0.0",
+        "finalhandler": "^1.2.0",
+        "forwarded": "^0.2.0",
+        "fresh": "^0.5.2",
+        "http-errors": "^1.8.0",
+        "iconv-lite": "^0.6.0",
+        "inherits": "^2.0.4",
+        "ipaddr.js": "^2.2.0",
+        "media-typer": "^1.1.0",
+        "merge-descriptors": "^2.0.0",
+        "methods": "^1.1.2",
+        "mime": "^1.6.0",
+        "mime-db": "^1.51.0",
+        "mime-types": "^2.1.35",
+        "ms": "^2.1.2",
+        "negotiator": "^0.6.3",
+        "on-finished": "^2.4.0",
+        "parseurl": "^1.3.3",
+        "path-to-regexp": "^8.0.0",
+        "proxy-addr": "^2.0.7",
+        "qs": "^6.8.0",
+        "range-parser": "^1.2.1",
+        "raw-body": "^2.4.1",
+        "safe-buffer": "^5.2.1",
+        "safer-buffer": "^2.1.2",
+        "send": "^0.19.0",
+        "serve-static": "^1.15.0",
+        "setprototypeof": "^1.2.0",
+        "statuses": "^2.0.1",
+        "type-is": "^1.6.18",
+        "utils-merge": "^1.0.1",
+        "vary": "^1.1.2"
       },
-    "array-flatten": {
-      "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
-      "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
-    },
-    "body-parser": {
-      "version": "1.19.0",
-      "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
-      "integrity":
"sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", - "requires": { - "bytes": "3.1.0", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.7.0", - "raw-body": "2.4.0", - "type-is": "~1.6.17" - } + "engines": { + "node": "^20.0.0" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" }, - "bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" - }, - "content-disposition": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", - "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", - "requires": { - "safe-buffer": "5.1.2" - } + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/array-flatten": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-3.0.0.tgz", + "integrity": "sha512-zPMVc3ZYlGLNk4mpK1NzP2wg0ml9t7fUgDsayR5Y5rSzxQilzR9FGu/EH2jQOcKSAeAfWeylyW8juy3OkWRvNA==", + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" }, - "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" - }, - "cookie": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", - "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/http-errors": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" }, - "depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" - }, - "destroy": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" - }, - "escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" - }, - "etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" - }, - "express": { - "version": "4.17.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", - "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", - "requires": { - "accepts": "~1.3.7", - "array-flatten": "1.1.1", - "body-parser": "1.19.0", - "content-disposition": "0.5.3", - "content-type": "~1.0.4", - "cookie": "0.4.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "~1.1.2", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "~1.1.2", - "fresh": "0.5.2", - "merge-descriptors": "1.0.1", - "methods": "~1.1.2", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.5", - "qs": "6.7.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.1.2", - "send": "0.17.1", - "serve-static": "1.14.1", - "setprototypeof": "1.1.1", - "statuses": "~1.5.0", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - } + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/body-parser/node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" }, - "finalhandler": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", - "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", - "requires": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "statuses": "~1.5.0", - "unpipe": "~1.0.0" - } + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" }, - "forwarded": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" - }, - "http-errors": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", - "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", - "requires": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.1", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" - } + "engines": { + "node": ">= 0.4" }, - "iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" }, - "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" - }, - "ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.1.tgz", + "integrity": "sha512-78KWk9T26NhzXtuL26cIJ8/qNHANyJ/ZYrmEXFzUmhZdjpBv+DlWlOANRTGBt48YcyslsLrj0bMLFTmXvLRCOw==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" }, - "media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" - }, - "merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" - }, - "mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" - }, - "mime-db": { - "version": "1.47.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.47.0.tgz", - "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==" - }, - "mime-types": { - "version": "2.1.30", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.30.tgz", - "integrity": "sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg==", - "requires": { - "mime-db": "1.47.0" - } + "engines": { + "node": ">=6.0" }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "negotiator": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", - "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" - }, - "on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", - "requires": { - "ee-first": "1.1.1" + "peerDependenciesMeta": { + "supports-color": { + "optional": true } + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, - "parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" - }, - "path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" - }, - "proxy-addr": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", - "integrity": 
"sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", - "requires": { - "forwarded": "~0.1.2", - "ipaddr.js": "1.9.1" - } + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.4" }, - "qs": { - "version": "6.7.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", - "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" - }, - "range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" - }, - "raw-body": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", - "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", - "requires": { - "bytes": "3.1.0", - "http-errors": "1.7.2", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express": { + "version": "5.0.0", 
+ "resolved": "https://registry.npmjs.org/express/-/express-5.0.0.tgz", + "integrity": "sha512-V4UkHQc+B7ldh1YC84HCXHwf60M4BOMvp9rkvTUWCK5apqDC1Esnbid4wm6nFyVuDy8XMfETsJw5lsIGBWyo0A==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.0.1", + "content-disposition": "^1.0.0", + "content-type": "~1.0.4", + "cookie": "0.6.0", + "cookie-signature": "^1.2.1", + "debug": "4.3.6", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "^2.0.0", + "fresh": "2.0.0", + "http-errors": "2.0.0", + "merge-descriptors": "^2.0.0", + "methods": "~1.1.2", + "mime-types": "^3.0.0", + "on-finished": "2.4.1", + "once": "1.4.0", + "parseurl": "~1.3.3", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "router": "^2.0.0", + "safe-buffer": "5.2.1", + "send": "^1.1.0", + "serve-static": "^2.1.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "^2.0.0", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/express/node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/body-parser": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.0.1.tgz", + "integrity": "sha512-PagxbjvuPH6tv0f/kdVbFGcb79D236SLcDTs6DrQ7GizJ88S1UWP4nMXFEo/I4fdhGRGabvFfFjVGm3M7U8JwA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "3.1.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.5.2", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "^3.0.0", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/express/node_modules/body-parser/node_modules/debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/body-parser/node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/body-parser/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/body-parser/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/express/node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/express/node_modules/body-parser/node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true } + } + }, + "node_modules/express/node_modules/finalhandler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.0.0.tgz", + "integrity": "sha512-MX6Zo2adDViYh+GcxxB1dpO43eypOGUOL12rLCOTMQv/DfIbpSJUy4oQIIZhVZkH9e+bZWKMon0XHFEju16tkQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/finalhandler/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/express/node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": 
"sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/iconv-lite": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.5.2.tgz", + "integrity": "sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "requires": { - "lru-cache": "^6.0.0" + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express/node_modules/mime-types": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", + "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.53.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "license": "MIT" + }, + "node_modules/express/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express/node_modules/send": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.1.0.tgz", + "integrity": "sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "destroy": "^1.2.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^0.5.2", + "http-errors": "^2.0.0", + "mime-types": "^2.1.35", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/express/node_modules/send/node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true } + } + }, + "node_modules/express/node_modules/send/node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/send/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/send/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/express/node_modules/serve-static": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.1.0.tgz", + "integrity": "sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/express/node_modules/type-is": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.0.tgz", + "integrity": "sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" }, - "send": { - "version": "0.17.1", - "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", - "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", - "requires": { - "debug": "2.6.9", - 
"depd": "~1.1.2", - "destroy": "~1.0.4", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "~1.7.2", - "mime": "1.6.0", - "ms": "2.1.1", - "on-finished": "~2.3.0", - "range-parser": "~1.2.1", - "statuses": "~1.5.0" + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + 
"license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", + "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", + "license": "MIT", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/http-errors/node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", + "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + 
}, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.53.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", + "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + 
"ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-to-regexp": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.1.0.tgz", + "integrity": "sha512-Bqn3vc8CMHty6zuD+tG23s6v2kwxslHEhTj4eYaVKGIEB+YX/2wd0/rgXLFD9G9id9KCtbVy/3ZgmvZjpa0UdQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-addr/node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/raw-body/node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/raw-body/node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": 
"sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/router": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.0.0.tgz", + "integrity": "sha512-dIM5zVoG8xhC6rnSN8uoAgFARwTE7BQs8YwHEvK0VCmfxQXMaOuA1uiR1IPwsW7JyK5iTt7Od/TC9StasS2NPQ==", + "license": "MIT", + "dependencies": { + "array-flatten": "3.0.0", + "is-promise": "4.0.0", + "methods": "~1.1.2", + "parseurl": "~1.3.3", + "path-to-regexp": "^8.0.0", + "setprototypeof": "1.2.0", + "utils-merge": "1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" }, - "dependencies": { - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" - } + { + "type": "consulting", + "url": "https://feross.org/support" } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" }, - "serve-static": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", - "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", - "requires": { - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.17.1" - } + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": 
"MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" }, - "setprototypeof": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", - "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" - }, - "statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" - }, - "toidentifier": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", - "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" - }, - "type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "requires": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - } + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" }, - "unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" }, - "utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, - "vary": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" } - } \ No newline at end of file + } +} diff --git a/__tests__/data/pnpm-lock.yaml b/__tests__/data/pnpm-lock.yaml index 7b8f8cd8f..0b8b3d14c 100644 --- a/__tests__/data/pnpm-lock.yaml +++ b/__tests__/data/pnpm-lock.yaml @@ -1,455 +1,1256 @@ -lockfileVersion: 5.3 +lockfileVersion: '9.0' -specifiers: - express: ^4.17.1 +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false -dependencies: - express: 4.17.1 +importers: + .: + dependencies: + accepts: + specifier: ^1.3.8 + version: 1.3.8 + array-flatten: + specifier: ^3.0.0 + version: 3.0.0 + body-parser: + specifier: ^1.20.0 + version: 1.20.3 + bytes: + specifier: ^3.1.2 + version: 3.1.2 + content-disposition: + 
specifier: ^0.5.4 + version: 0.5.4 + content-type: + specifier: ^1.0.5 + version: 1.0.5 + cookie: + specifier: ^0.5.0 + version: 0.5.0 + cookie-signature: + specifier: ^1.0.7 + version: 1.2.1 + debug: + specifier: ^4.0.0 + version: 4.3.7 + depd: + specifier: ^2.0.0 + version: 2.0.0 + destroy: + specifier: ^1.1.0 + version: 1.2.0 + ee-first: + specifier: ^1.1.1 + version: 1.1.1 + encodeurl: + specifier: ^2.0.0 + version: 2.0.0 + escape-html: + specifier: ^1.0.3 + version: 1.0.3 + etag: + specifier: ^1.8.1 + version: 1.8.1 + express: + specifier: ^5.0.0 + version: 5.0.0 + finalhandler: + specifier: ^1.2.0 + version: 1.3.1 + forwarded: + specifier: ^0.2.0 + version: 0.2.0 + fresh: + specifier: ^0.5.2 + version: 0.5.2 + http-errors: + specifier: ^1.8.0 + version: 1.8.1 + iconv-lite: + specifier: ^0.6.0 + version: 0.6.3 + inherits: + specifier: ^2.0.4 + version: 2.0.4 + ipaddr.js: + specifier: ^2.2.0 + version: 2.2.0 + media-typer: + specifier: ^1.1.0 + version: 1.1.0 + merge-descriptors: + specifier: ^2.0.0 + version: 2.0.0 + methods: + specifier: ^1.1.2 + version: 1.1.2 + mime: + specifier: ^1.6.0 + version: 1.6.0 + mime-db: + specifier: ^1.51.0 + version: 1.53.0 + mime-types: + specifier: ^2.1.35 + version: 2.1.35 + ms: + specifier: ^2.1.2 + version: 2.1.3 + negotiator: + specifier: ^0.6.3 + version: 0.6.3 + on-finished: + specifier: ^2.4.0 + version: 2.4.1 + parseurl: + specifier: ^1.3.3 + version: 1.3.3 + path-to-regexp: + specifier: ^8.0.0 + version: 8.1.0 + proxy-addr: + specifier: ^2.0.7 + version: 2.0.7 + qs: + specifier: ^6.8.0 + version: 6.13.0 + range-parser: + specifier: ^1.2.1 + version: 1.2.1 + raw-body: + specifier: ^2.4.1 + version: 2.5.2 + safe-buffer: + specifier: ^5.2.1 + version: 5.2.1 + safer-buffer: + specifier: ^2.1.2 + version: 2.1.2 + send: + specifier: ^0.18.0 + version: 0.18.0 + serve-static: + specifier: ^1.15.0 + version: 1.16.2 + setprototypeof: + specifier: ^1.2.0 + version: 1.2.0 + statuses: + specifier: ^2.0.1 + version: 2.0.1 + type-is: + specifier: ^1.6.18 + version: 1.6.18 + utils-merge: + specifier: ^1.0.1 + version: 1.0.1 + vary: + specifier: ^1.1.2 + version: 1.1.2 packages: - /accepts/1.3.7: + accepts@1.3.8: resolution: { - integrity: sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== } engines: {node: '>= 0.6'} - dependencies: - mime-types: 2.1.31 - negotiator: 0.6.2 - dev: false - /array-flatten/1.1.1: - resolution: {integrity: sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=} - dev: false + accepts@2.0.0: + resolution: + { + integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng== + } + engines: {node: '>= 0.6'} - /body-parser/1.19.0: + array-flatten@3.0.0: resolution: { - integrity: sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== + integrity: sha512-zPMVc3ZYlGLNk4mpK1NzP2wg0ml9t7fUgDsayR5Y5rSzxQilzR9FGu/EH2jQOcKSAeAfWeylyW8juy3OkWRvNA== } - engines: {node: '>= 0.8'} - dependencies: - bytes: 3.1.0 - content-type: 1.0.4 - debug: 2.6.9 - depd: 1.1.2 - http-errors: 1.7.2 - iconv-lite: 0.4.24 - on-finished: 2.3.0 - qs: 6.7.0 - raw-body: 2.4.0 - type-is: 1.6.18 - dev: false - /bytes/3.1.0: + body-parser@1.20.3: + resolution: + { + integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== + } + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + + body-parser@2.0.1: 
resolution: { - integrity: sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== + integrity: sha512-PagxbjvuPH6tv0f/kdVbFGcb79D236SLcDTs6DrQ7GizJ88S1UWP4nMXFEo/I4fdhGRGabvFfFjVGm3M7U8JwA== + } + engines: {node: '>= 0.10'} + + bytes@3.1.2: + resolution: + { + integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== } engines: {node: '>= 0.8'} - dev: false - /content-disposition/0.5.3: + call-bind@1.0.7: + resolution: + { + integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + } + engines: {node: '>= 0.4'} + + content-disposition@0.5.4: resolution: { - integrity: sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== + integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + } + engines: {node: '>= 0.6'} + + content-disposition@1.0.0: + resolution: + { + integrity: sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg== } engines: {node: '>= 0.6'} - dependencies: - safe-buffer: 5.1.2 - dev: false - /content-type/1.0.4: + content-type@1.0.5: resolution: { - integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== } engines: {node: '>= 0.6'} - dev: false - /cookie-signature/1.0.6: - resolution: {integrity: sha1-4wOogrNCzD7oylE6eZmXNNqzriw=} - dev: false + cookie-signature@1.2.1: + resolution: + { + integrity: sha512-78KWk9T26NhzXtuL26cIJ8/qNHANyJ/ZYrmEXFzUmhZdjpBv+DlWlOANRTGBt48YcyslsLrj0bMLFTmXvLRCOw== + } + engines: {node: '>=6.6.0'} + + cookie@0.5.0: + resolution: + { + integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + } + engines: {node: '>= 0.6'} - /cookie/0.4.0: + cookie@0.6.0: resolution: { - integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== + integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== } engines: {node: '>= 0.6'} - dev: false - /debug/2.6.9: + debug@2.6.9: resolution: { integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== } - dependencies: - ms: 2.0.0 - dev: false + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@3.1.0: + resolution: + { + integrity: sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== + } + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.3.6: + resolution: + { + integrity: sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg== + } + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.3.7: + resolution: + { + integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== + } + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + define-data-property@1.1.4: + resolution: + { + integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + } + engines: {node: 
'>= 0.4'} - /depd/1.1.2: - resolution: {integrity: sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=} + depd@1.1.2: + resolution: + { + integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + } engines: {node: '>= 0.6'} - dev: false - /destroy/1.0.4: - resolution: {integrity: sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=} - dev: false + depd@2.0.0: + resolution: + { + integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + } + engines: {node: '>= 0.8'} + + destroy@1.2.0: + resolution: + { + integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + } + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + + ee-first@1.1.1: + resolution: + { + integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + } - /ee-first/1.1.1: - resolution: {integrity: sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=} - dev: false + encodeurl@1.0.2: + resolution: + { + integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + } + engines: {node: '>= 0.8'} - /encodeurl/1.0.2: - resolution: {integrity: sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=} + encodeurl@2.0.0: + resolution: + { + integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + } engines: {node: '>= 0.8'} - dev: false - /escape-html/1.0.3: - resolution: {integrity: sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=} - dev: false + es-define-property@1.0.0: + resolution: + { + integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + } + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: + { + integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + } + engines: {node: '>= 0.4'} + + escape-html@1.0.3: + resolution: + { + integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + } - /etag/1.8.1: - resolution: {integrity: sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=} + etag@1.8.1: + resolution: + { + integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + } engines: {node: '>= 0.6'} - dev: false - /express/4.17.1: + express@5.0.0: resolution: { - integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== + integrity: sha512-V4UkHQc+B7ldh1YC84HCXHwf60M4BOMvp9rkvTUWCK5apqDC1Esnbid4wm6nFyVuDy8XMfETsJw5lsIGBWyo0A== } - engines: {node: '>= 0.10.0'} - dependencies: - accepts: 1.3.7 - array-flatten: 1.1.1 - body-parser: 1.19.0 - content-disposition: 0.5.3 - content-type: 1.0.4 - cookie: 0.4.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 1.1.2 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - finalhandler: 1.1.2 - fresh: 0.5.2 - merge-descriptors: 1.0.1 - methods: 1.1.2 - on-finished: 2.3.0 - parseurl: 1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: 2.0.7 - qs: 6.7.0 - range-parser: 1.2.1 - safe-buffer: 5.1.2 - send: 0.17.1 - serve-static: 1.14.1 - setprototypeof: 1.1.1 - statuses: 1.5.0 - type-is: 1.6.18 - utils-merge: 1.0.1 - vary: 1.1.2 - dev: false + engines: {node: '>= 18'} - /finalhandler/1.1.2: + finalhandler@1.3.1: resolution: { - integrity: sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== + integrity: sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== + } + engines: 
{node: '>= 0.8'} + + finalhandler@2.0.0: + resolution: + { + integrity: sha512-MX6Zo2adDViYh+GcxxB1dpO43eypOGUOL12rLCOTMQv/DfIbpSJUy4oQIIZhVZkH9e+bZWKMon0XHFEju16tkQ== } engines: {node: '>= 0.8'} - dependencies: - debug: 2.6.9 - encodeurl: 1.0.2 - escape-html: 1.0.3 - on-finished: 2.3.0 - parseurl: 1.3.3 - statuses: 1.5.0 - unpipe: 1.0.0 - dev: false - /forwarded/0.2.0: + forwarded@0.2.0: resolution: { integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== } engines: {node: '>= 0.6'} - dev: false - /fresh/0.5.2: - resolution: {integrity: sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=} + fresh@0.5.2: + resolution: + { + integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + } engines: {node: '>= 0.6'} - dev: false - /http-errors/1.7.2: + fresh@2.0.0: resolution: { - integrity: sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== + integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A== } - engines: {node: '>= 0.6'} - dependencies: - depd: 1.1.2 - inherits: 2.0.3 - setprototypeof: 1.1.1 - statuses: 1.5.0 - toidentifier: 1.0.0 - dev: false + engines: {node: '>= 0.8'} - /http-errors/1.7.3: + function-bind@1.1.2: resolution: { - integrity: sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== + integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + } + + get-intrinsic@1.2.4: + resolution: + { + integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + } + engines: {node: '>= 0.4'} + + gopd@1.0.1: + resolution: + { + integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + } + + has-property-descriptors@1.0.2: + resolution: + { + integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + } + + has-proto@1.0.3: + resolution: + { + integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + } + engines: {node: '>= 0.4'} + + has-symbols@1.0.3: + resolution: + { + integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + } + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: + { + integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + } + engines: {node: '>= 0.4'} + + http-errors@1.8.1: + resolution: + { + integrity: sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g== } engines: {node: '>= 0.6'} - dependencies: - depd: 1.1.2 - inherits: 2.0.4 - setprototypeof: 1.1.1 - statuses: 1.5.0 - toidentifier: 1.0.0 - dev: false - /iconv-lite/0.4.24: + http-errors@2.0.0: + resolution: + { + integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + } + engines: {node: '>= 0.8'} + + iconv-lite@0.4.24: resolution: { integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== } engines: {node: '>=0.10.0'} - dependencies: - safer-buffer: 2.1.2 - dev: false - /inherits/2.0.3: - resolution: {integrity: sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=} - dev: false + iconv-lite@0.5.2: + resolution: + { + integrity: 
sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag== + } + engines: {node: '>=0.10.0'} + + iconv-lite@0.6.3: + resolution: + { + integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + } + engines: {node: '>=0.10.0'} - /inherits/2.0.4: + inherits@2.0.4: resolution: { integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== } - dev: false - /ipaddr.js/1.9.1: + ipaddr.js@1.9.1: resolution: { integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== } engines: {node: '>= 0.10'} - dev: false - /media-typer/0.3.0: - resolution: {integrity: sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=} - engines: {node: '>= 0.6'} - dev: false + ipaddr.js@2.2.0: + resolution: + { + integrity: sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA== + } + engines: {node: '>= 10'} - /merge-descriptors/1.0.1: - resolution: {integrity: sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=} - dev: false + is-promise@4.0.0: + resolution: + { + integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== + } - /methods/1.1.2: - resolution: {integrity: sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=} + media-typer@0.3.0: + resolution: + { + integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + } engines: {node: '>= 0.6'} - dev: false - /mime-db/1.48.0: + media-typer@1.1.0: resolution: { - integrity: sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ== + integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw== } - engines: {node: '>= 0.6'} - dev: false + engines: {node: '>= 0.8'} - /mime-types/2.1.31: + merge-descriptors@2.0.0: resolution: { - integrity: sha512-XGZnNzm3QvgKxa8dpzyhFTHmpP3l5YNusmne07VUOXxou9CqUqYa/HBy124RqtVh/O2pECas/MOcsDgpilPOPg== + integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g== } - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.48.0 - dev: false + engines: {node: '>=18'} - /mime/1.6.0: + methods@1.1.2: resolution: { - integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== } - engines: {node: '>=4'} - hasBin: true - dev: false + engines: {node: '>= 0.6'} - /ms/2.0.0: - resolution: {integrity: sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=} - dev: false + mime-db@1.52.0: + resolution: + { + integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + } + engines: {node: '>= 0.6'} - /ms/2.1.1: + mime-db@1.53.0: resolution: { - integrity: sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + integrity: sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg== } - dev: false + engines: {node: '>= 0.6'} - /negotiator/0.6.2: + mime-types@2.1.35: resolution: { - integrity: sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== + integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== } engines: {node: '>= 0.6'} - dev: false - /on-finished/2.3.0: - resolution: {integrity: sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=} 
- engines: {node: '>= 0.8'} - dependencies: - ee-first: 1.1.1 - dev: false + mime-types@3.0.0: + resolution: + { + integrity: sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w== + } + engines: {node: '>= 0.6'} - /parseurl/1.3.3: + mime@1.6.0: resolution: { - integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== } - engines: {node: '>= 0.8'} - dev: false + engines: {node: '>=4'} + hasBin: true - /path-to-regexp/0.1.7: - resolution: {integrity: sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=} - dev: false + ms@2.0.0: + resolution: + { + integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + } - /proxy-addr/2.0.7: + ms@2.1.2: resolution: { - integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== } - engines: {node: '>= 0.10'} - dependencies: - forwarded: 0.2.0 - ipaddr.js: 1.9.1 - dev: false - /qs/6.7.0: + ms@2.1.3: resolution: { - integrity: sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== + integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== } - engines: {node: '>=0.6'} - dev: false - /range-parser/1.2.1: + negotiator@0.6.3: resolution: { - integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== } engines: {node: '>= 0.6'} - dev: false - /raw-body/2.4.0: + negotiator@1.0.0: resolution: { - integrity: sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== + integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg== } - engines: {node: '>= 0.8'} - dependencies: - bytes: 3.1.0 - http-errors: 1.7.2 - iconv-lite: 0.4.24 - unpipe: 1.0.0 - dev: false + engines: {node: '>= 0.6'} - /safe-buffer/5.1.2: + object-inspect@1.13.2: resolution: { - integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== } - dev: false + engines: {node: '>= 0.4'} - /safer-buffer/2.1.2: + on-finished@2.4.1: resolution: { - integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== } - dev: false + engines: {node: '>= 0.8'} - /send/0.17.1: + once@1.4.0: resolution: { - integrity: sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== + integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== } - engines: {node: '>= 0.8.0'} - dependencies: - debug: 2.6.9 - depd: 1.1.2 - destroy: 1.0.4 - encodeurl: 1.0.2 - escape-html: 1.0.3 - etag: 1.8.1 - fresh: 0.5.2 - http-errors: 1.7.3 - mime: 1.6.0 - ms: 2.1.1 - on-finished: 2.3.0 - range-parser: 1.2.1 - statuses: 1.5.0 - dev: false - /serve-static/1.14.1: + parseurl@1.3.3: resolution: { - integrity: 
sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== + integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== } - engines: {node: '>= 0.8.0'} - dependencies: - encodeurl: 1.0.2 - escape-html: 1.0.3 - parseurl: 1.3.3 - send: 0.17.1 - dev: false + engines: {node: '>= 0.8'} - /setprototypeof/1.1.1: + path-to-regexp@8.1.0: resolution: { - integrity: sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== + integrity: sha512-Bqn3vc8CMHty6zuD+tG23s6v2kwxslHEhTj4eYaVKGIEB+YX/2wd0/rgXLFD9G9id9KCtbVy/3ZgmvZjpa0UdQ== } - dev: false + engines: {node: '>=16'} - /statuses/1.5.0: - resolution: {integrity: sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=} - engines: {node: '>= 0.6'} - dev: false + proxy-addr@2.0.7: + resolution: + { + integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + } + engines: {node: '>= 0.10'} - /toidentifier/1.0.0: + qs@6.13.0: resolution: { - integrity: sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== + integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== } engines: {node: '>=0.6'} - dev: false - /type-is/1.6.18: + range-parser@1.2.1: resolution: { - integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== } engines: {node: '>= 0.6'} - dependencies: - media-typer: 0.3.0 - mime-types: 2.1.31 - dev: false - /unpipe/1.0.0: - resolution: {integrity: sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=} + raw-body@2.5.2: + resolution: + { + integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== + } + engines: {node: '>= 0.8'} + + raw-body@3.0.0: + resolution: + { + integrity: sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g== + } + engines: {node: '>= 0.8'} + + router@2.0.0: + resolution: + { + integrity: sha512-dIM5zVoG8xhC6rnSN8uoAgFARwTE7BQs8YwHEvK0VCmfxQXMaOuA1uiR1IPwsW7JyK5iTt7Od/TC9StasS2NPQ== + } + engines: {node: '>= 0.10'} + + safe-buffer@5.2.1: + resolution: + { + integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + } + + safer-buffer@2.1.2: + resolution: + { + integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + } + + send@0.18.0: + resolution: + { + integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + } + engines: {node: '>= 0.8.0'} + + send@0.19.0: + resolution: + { + integrity: sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== + } + engines: {node: '>= 0.8.0'} + + send@1.1.0: + resolution: + { + integrity: sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA== + } + engines: {node: '>= 18'} + + serve-static@1.16.2: + resolution: + { + integrity: sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== + } + engines: {node: '>= 0.8.0'} + + serve-static@2.1.0: + resolution: + { + integrity: sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA== + } + engines: {node: '>= 18'} + + set-function-length@1.2.2: + resolution: + { + integrity: 
sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + } + engines: {node: '>= 0.4'} + + setprototypeof@1.2.0: + resolution: + { + integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + } + + side-channel@1.0.6: + resolution: + { + integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + } + engines: {node: '>= 0.4'} + + statuses@1.5.0: + resolution: + { + integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + } + engines: {node: '>= 0.6'} + + statuses@2.0.1: + resolution: + { + integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + } + engines: {node: '>= 0.8'} + + toidentifier@1.0.1: + resolution: + { + integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + } + engines: {node: '>=0.6'} + + type-is@1.6.18: + resolution: + { + integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + } + engines: {node: '>= 0.6'} + + type-is@2.0.0: + resolution: + { + integrity: sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw== + } + engines: {node: '>= 0.6'} + + unpipe@1.0.0: + resolution: + { + integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + } engines: {node: '>= 0.8'} - dev: false - /utils-merge/1.0.1: - resolution: {integrity: sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=} + utils-merge@1.0.1: + resolution: + { + integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + } engines: {node: '>= 0.4.0'} - dev: false - /vary/1.1.2: - resolution: {integrity: sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=} + vary@1.1.2: + resolution: + { + integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + } engines: {node: '>= 0.8'} - dev: false + + wrappy@1.0.2: + resolution: + { + integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + } + +snapshots: + accepts@1.3.8: + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + + accepts@2.0.0: + dependencies: + mime-types: 3.0.0 + negotiator: 1.0.0 + + array-flatten@3.0.0: {} + + body-parser@1.20.3: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.13.0 + raw-body: 2.5.2 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + body-parser@2.0.1: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 3.1.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.5.2 + on-finished: 2.4.1 + qs: 6.13.0 + raw-body: 3.0.0 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + bytes@3.1.2: {} + + call-bind@1.0.7: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + set-function-length: 1.2.2 + + content-disposition@0.5.4: + dependencies: + safe-buffer: 5.2.1 + + content-disposition@1.0.0: + dependencies: + safe-buffer: 5.2.1 + + content-type@1.0.5: {} + + cookie-signature@1.2.1: {} + + cookie@0.5.0: {} + + cookie@0.6.0: {} + + debug@2.6.9: + dependencies: + ms: 2.0.0 + + debug@3.1.0: + dependencies: + ms: 2.0.0 + + debug@4.3.6: + dependencies: + 
ms: 2.1.2 + + debug@4.3.7: + dependencies: + ms: 2.1.3 + + define-data-property@1.1.4: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + gopd: 1.0.1 + + depd@1.1.2: {} + + depd@2.0.0: {} + + destroy@1.2.0: {} + + ee-first@1.1.1: {} + + encodeurl@1.0.2: {} + + encodeurl@2.0.0: {} + + es-define-property@1.0.0: + dependencies: + get-intrinsic: 1.2.4 + + es-errors@1.3.0: {} + + escape-html@1.0.3: {} + + etag@1.8.1: {} + + express@5.0.0: + dependencies: + accepts: 2.0.0 + body-parser: 2.0.1 + content-disposition: 1.0.0 + content-type: 1.0.5 + cookie: 0.6.0 + cookie-signature: 1.2.1 + debug: 4.3.6 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.0.0 + fresh: 2.0.0 + http-errors: 2.0.0 + merge-descriptors: 2.0.0 + methods: 1.1.2 + mime-types: 3.0.0 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.13.0 + range-parser: 1.2.1 + router: 2.0.0 + safe-buffer: 5.2.1 + send: 1.1.0 + serve-static: 2.1.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 2.0.0 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + finalhandler@1.3.1: + dependencies: + debug: 2.6.9 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + finalhandler@2.0.0: + dependencies: + debug: 2.6.9 + encodeurl: 1.0.2 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + forwarded@0.2.0: {} + + fresh@0.5.2: {} + + fresh@2.0.0: {} + + function-bind@1.1.2: {} + + get-intrinsic@1.2.4: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + hasown: 2.0.2 + + gopd@1.0.1: + dependencies: + get-intrinsic: 1.2.4 + + has-property-descriptors@1.0.2: + dependencies: + es-define-property: 1.0.0 + + has-proto@1.0.3: {} + + has-symbols@1.0.3: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + http-errors@1.8.1: + dependencies: + depd: 1.1.2 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 1.5.0 + toidentifier: 1.0.1 + + http-errors@2.0.0: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + + iconv-lite@0.4.24: + dependencies: + safer-buffer: 2.1.2 + + iconv-lite@0.5.2: + dependencies: + safer-buffer: 2.1.2 + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + inherits@2.0.4: {} + + ipaddr.js@1.9.1: {} + + ipaddr.js@2.2.0: {} + + is-promise@4.0.0: {} + + media-typer@0.3.0: {} + + media-typer@1.1.0: {} + + merge-descriptors@2.0.0: {} + + methods@1.1.2: {} + + mime-db@1.52.0: {} + + mime-db@1.53.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mime-types@3.0.0: + dependencies: + mime-db: 1.53.0 + + mime@1.6.0: {} + + ms@2.0.0: {} + + ms@2.1.2: {} + + ms@2.1.3: {} + + negotiator@0.6.3: {} + + negotiator@1.0.0: {} + + object-inspect@1.13.2: {} + + on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + parseurl@1.3.3: {} + + path-to-regexp@8.1.0: {} + + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + + qs@6.13.0: + dependencies: + side-channel: 1.0.6 + + range-parser@1.2.1: {} + + raw-body@2.5.2: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + unpipe: 1.0.0 + + raw-body@3.0.0: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.6.3 + unpipe: 1.0.0 + + router@2.0.0: + 
dependencies: + array-flatten: 3.0.0 + is-promise: 4.0.0 + methods: 1.1.2 + parseurl: 1.3.3 + path-to-regexp: 8.1.0 + setprototypeof: 1.2.0 + utils-merge: 1.0.1 + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + send@0.18.0: + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + + send@0.19.0: + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + + send@1.1.0: + dependencies: + debug: 4.3.7 + destroy: 1.2.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime-types: 2.1.35 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + + serve-static@1.16.2: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 0.19.0 + transitivePeerDependencies: + - supports-color + + serve-static@2.1.0: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.1.0 + transitivePeerDependencies: + - supports-color + + set-function-length@1.2.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + gopd: 1.0.1 + has-property-descriptors: 1.0.2 + + setprototypeof@1.2.0: {} + + side-channel@1.0.6: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + object-inspect: 1.13.2 + + statuses@1.5.0: {} + + statuses@2.0.1: {} + + toidentifier@1.0.1: {} + + type-is@1.6.18: + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + + type-is@2.0.0: + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.0 + + unpipe@1.0.0: {} + + utils-merge@1.0.1: {} + + vary@1.1.2: {} + + wrappy@1.0.2: {} diff --git a/__tests__/data/yarn.lock b/__tests__/data/yarn.lock index 97a144a73..96da9bfd9 100644 --- a/__tests__/data/yarn.lock +++ b/__tests__/data/yarn.lock @@ -2,61 +2,111 @@ # yarn lockfile v1 -accepts@~1.3.7: - version "1.3.7" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" - integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== +accepts@^1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== dependencies: - mime-types "~2.1.24" - negotiator "0.6.2" - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= + mime-types "~2.1.34" + negotiator "0.6.3" -body-parser@1.19.0: - version "1.19.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" - integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== +accepts@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/accepts/-/accepts-2.0.0.tgz#bbcf4ba5075467f3f2131eab3cffc73c2f5d7895" + integrity sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng== dependencies: - bytes "3.1.0" - content-type "~1.0.4" + mime-types "^3.0.0" + negotiator "^1.0.0" + +array-flatten@3.0.0, array-flatten@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-3.0.0.tgz#6428ca2ee52c7b823192ec600fa3ed2f157cd541" + integrity sha512-zPMVc3ZYlGLNk4mpK1NzP2wg0ml9t7fUgDsayR5Y5rSzxQilzR9FGu/EH2jQOcKSAeAfWeylyW8juy3OkWRvNA== + +body-parser@^1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== + dependencies: + bytes "3.1.2" + content-type "~1.0.5" debug "2.6.9" - depd "~1.1.2" - http-errors "1.7.2" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" iconv-lite "0.4.24" - on-finished "~2.3.0" - qs "6.7.0" - raw-body "2.4.0" - type-is "~1.6.17" + on-finished "2.4.1" + qs "6.13.0" + raw-body "2.5.2" + type-is "~1.6.18" + unpipe "1.0.0" -bytes@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" - integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== +body-parser@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-2.0.1.tgz#979de4a43468c5624403457fd6d45f797faffbaf" + integrity sha512-PagxbjvuPH6tv0f/kdVbFGcb79D236SLcDTs6DrQ7GizJ88S1UWP4nMXFEo/I4fdhGRGabvFfFjVGm3M7U8JwA== + dependencies: + bytes "3.1.2" + content-type "~1.0.5" + debug "3.1.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.5.2" + on-finished "2.4.1" + qs "6.13.0" + raw-body "^3.0.0" + type-is "~1.6.18" + unpipe "1.0.0" + +bytes@3.1.2, bytes@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + +content-disposition@^0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" -content-disposition@0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" - integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== +content-disposition@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-1.0.0.tgz#844426cb398f934caefcbb172200126bc7ceace2" + integrity sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg== 
dependencies: - safe-buffer "5.1.2" + safe-buffer "5.2.1" -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +content-type@^1.0.5, content-type@~1.0.4, content-type@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= +cookie-signature@^1.0.7, cookie-signature@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.2.1.tgz#790dea2cce64638c7ae04d9fabed193bd7ccf3b4" + integrity sha512-78KWk9T26NhzXtuL26cIJ8/qNHANyJ/ZYrmEXFzUmhZdjpBv+DlWlOANRTGBt48YcyslsLrj0bMLFTmXvLRCOw== + +cookie@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== -cookie@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" - integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== +cookie@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== debug@2.6.9: version "2.6.9" @@ -65,116 +115,235 @@ debug@2.6.9: dependencies: ms "2.0.0" +debug@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== + dependencies: + ms "2.0.0" + +debug@4.3.6: + version "4.3.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.6.tgz#2ab2c38fbaffebf8aa95fdfe6d88438c7a13c52b" + integrity sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg== + dependencies: + ms "2.1.2" + +debug@^4.0.0, debug@^4.3.5: + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== + dependencies: + ms "^2.1.3" + +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + +depd@2.0.0, depd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + depd@~1.1.2: version "1.1.2" resolved 
"https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== -destroy@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= +destroy@1.2.0, destroy@^1.1.0, destroy@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== -ee-first@1.1.1: +ee-first@1.1.1, ee-first@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +encodeurl@^2.0.0, encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" -escape-html@~1.0.3: +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +escape-html@^1.0.3, escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -etag@~1.8.1: +etag@^1.8.1, etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== -express@^4.17.1: - version "4.17.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" - integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== +express@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/express/-/express-5.0.0.tgz#744f9ec86025a01aeca99e4300aa4fc050d493c7" + integrity sha512-V4UkHQc+B7ldh1YC84HCXHwf60M4BOMvp9rkvTUWCK5apqDC1Esnbid4wm6nFyVuDy8XMfETsJw5lsIGBWyo0A== dependencies: - accepts "~1.3.7" - array-flatten "1.1.1" - body-parser "1.19.0" - content-disposition "0.5.3" + accepts "^2.0.0" + body-parser 
"^2.0.1" + content-disposition "^1.0.0" content-type "~1.0.4" - cookie "0.4.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "~1.1.2" - encodeurl "~1.0.2" + cookie "0.6.0" + cookie-signature "^1.2.1" + debug "4.3.6" + depd "2.0.0" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "~1.1.2" - fresh "0.5.2" - merge-descriptors "1.0.1" + finalhandler "^2.0.0" + fresh "2.0.0" + http-errors "2.0.0" + merge-descriptors "^2.0.0" methods "~1.1.2" - on-finished "~2.3.0" + mime-types "^3.0.0" + on-finished "2.4.1" + once "1.4.0" parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.5" - qs "6.7.0" + proxy-addr "~2.0.7" + qs "6.13.0" range-parser "~1.2.1" - safe-buffer "5.1.2" - send "0.17.1" - serve-static "1.14.1" - setprototypeof "1.1.1" - statuses "~1.5.0" - type-is "~1.6.18" + router "^2.0.0" + safe-buffer "5.2.1" + send "^1.1.0" + serve-static "^2.1.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "^2.0.0" utils-merge "1.0.1" vary "~1.1.2" -finalhandler@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== +finalhandler@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== + dependencies: + debug "2.6.9" + encodeurl "~2.0.0" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +finalhandler@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-2.0.0.tgz#9d3c79156dfa798069db7de7dd53bc37546f564b" + integrity sha512-MX6Zo2adDViYh+GcxxB1dpO43eypOGUOL12rLCOTMQv/DfIbpSJUy4oQIIZhVZkH9e+bZWKMon0XHFEju16tkQ== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" - on-finished "~2.3.0" + on-finished "2.4.1" parseurl "~1.3.3" - statuses "~1.5.0" + statuses "2.0.1" unpipe "~1.0.0" -forwarded@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" - integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= +forwarded@0.2.0, forwarded@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== -fresh@0.5.2: +fresh@0.5.2, fresh@^0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== -http-errors@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" - integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== +fresh@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-2.0.0.tgz#8dd7df6a1b3a1b3a5cf186c05a5dd267622635a4" + integrity sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A== + +function-bind@^1.1.2: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +get-intrinsic@^1.1.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.1" - statuses ">= 1.5.0 < 2" - toidentifier "1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + +has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +http-errors@2.0.0, http-errors@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" -http-errors@~1.7.2: - version "1.7.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" - integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== +http-errors@^1.8.0: + version "1.8.1" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c" + integrity sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g== dependencies: depd "~1.1.2" inherits "2.0.4" - setprototypeof "1.1.1" + setprototypeof "1.2.0" statuses ">= 1.5.0 < 2" - toidentifier "1.0.0" + toidentifier "1.0.1" iconv-lite@0.4.24: version "0.4.24" @@ -183,12 +352,21 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -inherits@2.0.3: - version "2.0.3" - resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= +iconv-lite@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.5.2.tgz#af6d628dccfb463b7364d97f715e4b74b8c8c2b8" + integrity sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@0.6.3, iconv-lite@^0.6.0: + version "0.6.3" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" -inherits@2.0.4: +inherits@2.0.4, inherits@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -198,34 +376,61 @@ ipaddr.js@1.9.1: resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== +ipaddr.js@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.2.0.tgz#d33fa7bac284f4de7af949638c9d68157c6b92e8" + integrity sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA== + +is-promise@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" + integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== + media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= +media-typer@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-1.1.0.tgz#6ab74b8f2d3320f2064b2a87a38e7931ff3a5561" + integrity sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw== + +merge-descriptors@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-2.0.0.tgz#ea922f660635a2249ee565e0449f951e6b603808" + integrity sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g== -methods@~1.1.2: +methods@^1.1.2, methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= - -mime-db@1.47.0: - version "1.47.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.47.0.tgz#8cb313e59965d3c05cfbf898915a267af46a335c" - integrity sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw== + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +mime-db@1.52.0: + version 
"1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-db@^1.51.0, mime-db@^1.53.0: + version "1.53.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.53.0.tgz#3cb63cd820fc29896d9d4e8c32ab4fcd74ccb447" + integrity sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg== + +mime-types@^2.1.35, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" -mime-types@~2.1.24: - version "2.1.30" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.30.tgz#6e7be8b4c479825f85ed6326695db73f9305d62d" - integrity sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg== +mime-types@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-3.0.0.tgz#148453a900475522d095a445355c074cca4f5217" + integrity sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w== dependencies: - mime-db "1.47.0" + mime-db "^1.53.0" -mime@1.6.0: +mime@1.6.0, mime@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== @@ -233,118 +438,220 @@ mime@1.6.0: ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" - integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== +ms@2.1.3, ms@^2.1.2, ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -negotiator@0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" - integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== +negotiator@0.6.3, negotiator@^0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= +negotiator@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/negotiator/-/negotiator-1.0.0.tgz#b6c91bb47172d69f93cfd7c357bbb529019b5f6a" + integrity sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg== + +object-inspect@^1.13.1: + version "1.13.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff" + integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== + +on-finished@2.4.1, on-finished@^2.4.0, on-finished@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first "1.1.1" -parseurl@~1.3.3: +once@1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +parseurl@^1.3.3, parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= +path-to-regexp@^8.0.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-8.1.0.tgz#4d687606ed0be8ed512ba802eb94d620cb1a86f0" + integrity sha512-Bqn3vc8CMHty6zuD+tG23s6v2kwxslHEhTj4eYaVKGIEB+YX/2wd0/rgXLFD9G9id9KCtbVy/3ZgmvZjpa0UdQ== -proxy-addr@~2.0.5: - version "2.0.6" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf" - integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw== +proxy-addr@^2.0.7, proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== dependencies: - forwarded "~0.1.2" + forwarded "0.2.0" ipaddr.js "1.9.1" -qs@6.7.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" - integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== +qs@6.13.0, qs@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== + dependencies: + side-channel "^1.0.6" -range-parser@~1.2.1: +range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" - integrity 
sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== +raw-body@2.5.2, raw-body@^2.4.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== dependencies: - bytes "3.1.0" - http-errors "1.7.2" + bytes "3.1.2" + http-errors "2.0.0" iconv-lite "0.4.24" unpipe "1.0.0" -safe-buffer@5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== +raw-body@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-3.0.0.tgz#25b3476f07a51600619dae3fe82ddc28a36e5e0f" + integrity sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.6.3" + unpipe "1.0.0" -"safer-buffer@>= 2.1.2 < 3": +router@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/router/-/router-2.0.0.tgz#8692720b95de83876870d7bc638dd3c7e1ae8a27" + integrity sha512-dIM5zVoG8xhC6rnSN8uoAgFARwTE7BQs8YwHEvK0VCmfxQXMaOuA1uiR1IPwsW7JyK5iTt7Od/TC9StasS2NPQ== + dependencies: + array-flatten "3.0.0" + is-promise "4.0.0" + methods "~1.1.2" + parseurl "~1.3.3" + path-to-regexp "^8.0.0" + setprototypeof "1.2.0" + utils-merge "1.0.1" + +safe-buffer@5.2.1, safe-buffer@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -send@0.17.1: - version "0.17.1" - resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" - integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== +send@0.19.0, send@^0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" - depd "~1.1.2" - destroy "~1.0.4" + depd "2.0.0" + destroy "1.2.0" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" - http-errors "~1.7.2" + http-errors "2.0.0" mime "1.6.0" - ms "2.1.1" - on-finished "~2.3.0" + ms "2.1.3" + on-finished "2.4.1" range-parser "~1.2.1" - statuses "~1.5.0" + statuses "2.0.1" -serve-static@1.14.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" - integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== +send@^1.0.0, send@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/send/-/send-1.1.0.tgz#4efe6ff3bb2139b0e5b2648d8b18d4dec48fc9c5" + integrity 
sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA== dependencies: - encodeurl "~1.0.2" + debug "^4.3.5" + destroy "^1.2.0" + encodeurl "^2.0.0" + escape-html "^1.0.3" + etag "^1.8.1" + fresh "^0.5.2" + http-errors "^2.0.0" + mime-types "^2.1.35" + ms "^2.1.3" + on-finished "^2.4.1" + range-parser "^1.2.1" + statuses "^2.0.1" + +serve-static@^1.15.0: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== + dependencies: + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.17.1" + send "0.19.0" -setprototypeof@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" - integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== +serve-static@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-2.1.0.tgz#1b4eacbe93006b79054faa4d6d0a501d7f0e84e2" + integrity sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA== + dependencies: + encodeurl "^2.0.0" + escape-html "^1.0.3" + parseurl "^1.3.3" + send "^1.0.0" + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + +setprototypeof@1.2.0, setprototypeof@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + +statuses@2.0.1, statuses@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== -"statuses@>= 1.5.0 < 2", statuses@~1.5.0: +"statuses@>= 1.5.0 < 2": version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== -toidentifier@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" - integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== +toidentifier@1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== -type-is@~1.6.17, type-is@~1.6.18: +type-is@^1.6.18, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== @@ -352,17 +659,31 @@ type-is@~1.6.17, type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" +type-is@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-2.0.0.tgz#7d249c2e2af716665cc149575dadb8b3858653af" + integrity sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw== + dependencies: + content-type "^1.0.5" + media-typer "^1.1.0" + mime-types "^3.0.0" + unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== -utils-merge@1.0.1: +utils-merge@1.0.1, utils-merge@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== -vary@~1.1.2: +vary@^1.1.2, vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index 4ab686cde..5cdc83e03 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -1,7 +1,7 @@ /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ -/***/ 7799: +/***/ 27799: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -40,14 +40,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const path = __importStar(__nccwpck_require__(1017)); -const utils = __importStar(__nccwpck_require__(1518)); -const cacheHttpClient = __importStar(__nccwpck_require__(8245)); -const tar_1 = __nccwpck_require__(6490); -const options_1 = __nccwpck_require__(6215); -const requestUtils_1 = __nccwpck_require__(3981); -const downloadUtils_1 = __nccwpck_require__(5500); +const core = __importStar(__nccwpck_require__(42186)); +const path = __importStar(__nccwpck_require__(71017)); +const utils = __importStar(__nccwpck_require__(91518)); +const cacheHttpClient = __importStar(__nccwpck_require__(98245)); +const tar_1 = 
__nccwpck_require__(56490); +const options_1 = __nccwpck_require__(76215); +const requestUtils_1 = __nccwpck_require__(13981); +const downloadUtils_1 = __nccwpck_require__(55500); class ValidationError extends Error { constructor(message) { super(message); @@ -99,7 +99,7 @@ exports.isFeatureAvailable = isFeatureAvailable; * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false, enableCrossArchArchive = false) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t; + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(primaryKey); @@ -210,6 +210,30 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch core.info('Cache restored successfully'); break; } + case 'azure_blob': { + if (!((_u = cacheEntry.azure_blob) === null || _u === void 0 ? void 0 : _u.pre_signed_url)) { + return undefined; + } + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheKey; + } + try { + yield cacheHttpClient.downloadCache(cacheEntry.provider, (_v = cacheEntry.azure_blob) === null || _v === void 0 ? void 0 : _v.pre_signed_url, archivePath); + } + catch (error) { + core.info('Cache Miss. Failed to download cache.'); + return undefined; + } + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + break; + } } return cacheKey; } @@ -246,7 +270,7 @@ exports.restoreCache = restoreCache; * @returns string returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, enableCrossOsArchive = false, enableCrossArchArchive = false) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y; + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); @@ -299,6 +323,13 @@ function saveCache(paths, key, enableCrossOsArchive = false, enableCrossArchArch // S3 Params are undefined for GCS undefined, undefined, undefined, undefined, (_s = (_r = (_q = (_p = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _p === void 0 ? void 0 : _p.gcs) === null || _q === void 0 ? void 0 : _q.short_lived_token) === null || _r === void 0 ? void 0 : _r.access_token) !== null && _s !== void 0 ? _s : '', (_v = (_u = (_t = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _t === void 0 ? void 0 : _t.gcs) === null || _u === void 0 ? void 0 : _u.bucket_name) !== null && _v !== void 0 ? _v : '', (_y = (_x = (_w = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _w === void 0 ? void 0 : _w.gcs) === null || _x === void 0 ? void 0 : _x.cache_key) !== null && _y !== void 0 ? 
_y : ''); break; + case 'azure_blob': + core.debug(`Saving Cache to Azure Blob`); + cacheKey = yield cacheHttpClient.saveCache('azure_blob', key, cacheVersion, archivePath, + // S3 Params are undefined for GCS + undefined, undefined, undefined, undefined, + // GCS Params are undefined for Azure Blob + undefined, undefined, undefined, (_1 = (_0 = (_z = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _z === void 0 ? void 0 : _z.azure_blob) === null || _0 === void 0 ? void 0 : _0.pre_signed_url) !== null && _1 !== void 0 ? _1 : ''); } } catch (error) { @@ -350,7 +381,7 @@ exports.deleteCache = deleteCache; /***/ }), -/***/ 8245: +/***/ 98245: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -392,17 +423,19 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.reserveCache = exports.downloadCacheStreaming = exports.downloadCacheSingleThread = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); +const core = __importStar(__nccwpck_require__(42186)); +const github = __importStar(__nccwpck_require__(28294)); +const http_client_1 = __nccwpck_require__(96255); +const auth_1 = __nccwpck_require__(35526); const crypto = __importStar(__nccwpck_require__(6113)); -const utils = __importStar(__nccwpck_require__(1518)); -const os_1 = __importDefault(__nccwpck_require__(2037)); -const downloadUtils_1 = __nccwpck_require__(5500); -const requestUtils_1 = __nccwpck_require__(3981); -const storage_1 = __nccwpck_require__(7577); +const utils = __importStar(__nccwpck_require__(91518)); +const os_1 = __importDefault(__nccwpck_require__(22037)); +const downloadUtils_1 = __nccwpck_require__(55500); +const requestUtils_1 = __nccwpck_require__(13981); +const storage_1 = __nccwpck_require__(27577); const uploadUtils_1 = __nccwpck_require__(1786); -const google_auth_library_1 = __nccwpck_require__(810); +const google_auth_library_1 = __nccwpck_require__(20810); +const storage_blob_1 = __nccwpck_require__(84100); const versionSalt = '1.0'; function getCacheApiUrl(resource) { var _a; @@ -466,6 +499,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false, if (process.platform === 'win32' && !enableCrossOsArchive) { components.push('windows-only'); } + // Check for mac platforms if enableCrossOsArchive is false + if (process.platform === 'darwin' && !enableCrossOsArchive) { + components.push('mac-only'); + } // Add architecture to cache version if (!enableCrossArchArchive) { components.push(process.arch); @@ -476,16 +513,49 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false, } exports.getCacheVersion = getCacheVersion; function getCacheEntry(key, restoreKeys, paths, options) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive, options === null || options === void 0 ? 
void 0 : options.enableCrossArchArchive); + const restoreBranches = new Set(); + const restoreRepos = new Set(); + switch (github.context.eventName) { + case 'pull_request': + { + const pullPayload = github.context.payload; + // Adds PR head branch and base branch to restoreBranches + restoreBranches.add(`refs/heads/${(_b = (_a = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _a === void 0 ? void 0 : _a.head) === null || _b === void 0 ? void 0 : _b.ref}`); + restoreBranches.add(`refs/heads/${(_d = (_c = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _c === void 0 ? void 0 : _c.base) === null || _d === void 0 ? void 0 : _d.ref}`); + // Adds default branch to restoreBranches + restoreBranches.add(`refs/heads/${(_e = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _e === void 0 ? void 0 : _e.default_branch}`); + // If head points to a different repository, add it to restoreRepos. We allow restores from head repos as well. + if (((_h = (_g = (_f = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _f === void 0 ? void 0 : _f.head) === null || _g === void 0 ? void 0 : _g.repo) === null || _h === void 0 ? void 0 : _h.name) !== + ((_j = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _j === void 0 ? void 0 : _j.name)) { + restoreRepos.add((_m = (_l = (_k = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _k === void 0 ? void 0 : _k.head) === null || _l === void 0 ? void 0 : _l.repo) === null || _m === void 0 ? void 0 : _m.name); + } + } + break; + case 'push': + case 'workflow_dispatch': + { + const pushPayload = github.context.payload; + // Default branch is not in the complete format + // Ref: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + restoreBranches.add(`refs/heads/${(_o = pushPayload === null || pushPayload === void 0 ? void 0 : pushPayload.repository) === null || _o === void 0 ? 
void 0 : _o.default_branch}`); + } + break; + default: + break; + } const getCacheRequest = { cache_key: key, restore_keys: restoreKeys, cache_version: version, vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), - annotations: getAnnotations() + annotations: getAnnotations(), + restore_branches: Array.from(restoreBranches), + restore_repos: Array.from(restoreRepos) }; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cache/get'), getCacheRequest); @@ -554,6 +624,11 @@ function downloadCache(provider, archiveLocation, archivePath, gcsToken) { yield (0, downloadUtils_1.downloadCacheMultipartGCP)(storage, archiveLocation, archivePath); break; } + case 'azure_blob': { + const blockBlobClient = new storage_blob_1.BlockBlobClient(archiveLocation); + yield blockBlobClient.downloadToFile(archivePath); + break; + } } }); } @@ -644,8 +719,8 @@ function commitCache(cacheKey, cacheVersion, uploadKey, uploadID, parts) { })); }); } -function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3UploadKey, S3NumberOfChunks, S3PreSignedURLs, GCSAuthToken, GCSBucketName, GCSObjectName) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j; +function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3UploadKey, S3NumberOfChunks, S3PreSignedURLs, GCSAuthToken, GCSBucketName, GCSObjectName, AzureSASURL) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t; return __awaiter(this, void 0, void 0, function* () { const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); @@ -674,7 +749,8 @@ function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3 completedParts.sort((a, b) => a.PartNumber - b.PartNumber); core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion, S3UploadKey, S3UploadId, completedParts); - cacheKeyResponse = (_c = (_b = (_a = commitCacheResponse.result) === null || _a === void 0 ? void 0 : _a.s3) === null || _b === void 0 ? void 0 : _b.cache_key) !== null && _c !== void 0 ? _c : ''; + cacheKeyResponse = + (_f = (_c = (_b = (_a = commitCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cache_entry) === null || _b === void 0 ? void 0 : _b.cache_user_given_key) !== null && _c !== void 0 ? _c : (_e = (_d = commitCacheResponse.result) === null || _d === void 0 ? void 0 : _d.s3) === null || _e === void 0 ? void 0 : _e.cache_key) !== null && _f !== void 0 ? _f : ''; break; } case 'gcs': { @@ -691,7 +767,23 @@ function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3 core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion); cacheKeyResponse = - (_j = (_f = (_e = (_d = commitCacheResponse.result) === null || _d === void 0 ? void 0 : _d.cache_entry) === null || _e === void 0 ? void 0 : _e.cache_user_given_key) !== null && _f !== void 0 ? _f : (_h = (_g = commitCacheResponse.result) === null || _g === void 0 ? void 0 : _g.gcs) === null || _h === void 0 ? void 0 : _h.cache_key) !== null && _j !== void 0 ? _j : ''; + (_m = (_j = (_h = (_g = commitCacheResponse.result) === null || _g === void 0 ? void 0 : _g.cache_entry) === null || _h === void 0 ? void 0 : _h.cache_user_given_key) !== null && _j !== void 0 ? _j : (_l = (_k = commitCacheResponse.result) === null || _k === void 0 ? 
void 0 : _k.gcs) === null || _l === void 0 ? void 0 : _l.cache_key) !== null && _m !== void 0 ? _m : ''; + break; + } + case 'azure_blob': { + if (!AzureSASURL) { + core.debug(`Azure SAS URL is not set. SAS URL: ${AzureSASURL}`); + throw new Error('Unable to upload cache to Azure Blob. SAS URL is not provided.'); + } + core.debug('Uploading cache'); + const blockBlobClient = new storage_blob_1.BlockBlobClient(AzureSASURL); + yield blockBlobClient.uploadFile(archivePath, { + maxSingleShotSize: 10 * 1024 * 1024 // 10 MB + }); + core.debug('Committing cache'); + commitCacheResponse = yield commitCache(cacheKey, cacheVersion); + cacheKeyResponse = + (_t = (_q = (_p = (_o = commitCacheResponse.result) === null || _o === void 0 ? void 0 : _o.cache_entry) === null || _p === void 0 ? void 0 : _p.cache_user_given_key) !== null && _q !== void 0 ? _q : (_s = (_r = commitCacheResponse.result) === null || _r === void 0 ? void 0 : _r.azure_blob) === null || _s === void 0 ? void 0 : _s.cache_key) !== null && _t !== void 0 ? _t : ''; break; } } @@ -726,7 +818,7 @@ exports.deleteCache = deleteCache; /***/ }), -/***/ 1518: +/***/ 91518: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -772,16 +864,16 @@ var __asyncValues = (this && this.__asyncValues) || function (o) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retrieveGCSBucketAndObjectName = exports.streamToBuffer = exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const glob = __importStar(__nccwpck_require__(1597)); -const io = __importStar(__nccwpck_require__(7436)); -const fs = __importStar(__nccwpck_require__(7147)); -const path = __importStar(__nccwpck_require__(1017)); +const core = __importStar(__nccwpck_require__(42186)); +const exec = __importStar(__nccwpck_require__(71514)); +const glob = __importStar(__nccwpck_require__(31597)); +const io = __importStar(__nccwpck_require__(47351)); +const fs = __importStar(__nccwpck_require__(57147)); +const path = __importStar(__nccwpck_require__(71017)); const semver = __importStar(__nccwpck_require__(3771)); -const util = __importStar(__nccwpck_require__(3837)); -const uuid_1 = __nccwpck_require__(4138); -const constants_1 = __nccwpck_require__(8840); +const util = __importStar(__nccwpck_require__(73837)); +const uuid_1 = __nccwpck_require__(94138); +const constants_1 = __nccwpck_require__(88840); // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 function createTempDirectory() { return __awaiter(this, void 0, void 0, function* () { @@ -953,7 +1045,7 @@ exports.retrieveGCSBucketAndObjectName = retrieveGCSBucketAndObjectName; /***/ }), -/***/ 8840: +/***/ 88840: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -996,7 +1088,7 @@ exports.ManifestFilename = 'manifest.txt'; /***/ }), -/***/ 5500: +/***/ 55500: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1035,16 +1127,16 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = 
exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(6255); -const fs = __importStar(__nccwpck_require__(7147)); -const stream = __importStar(__nccwpck_require__(2781)); -const util = __importStar(__nccwpck_require__(3837)); -const utils = __importStar(__nccwpck_require__(1518)); -const constants_1 = __nccwpck_require__(8840); -const requestUtils_1 = __nccwpck_require__(3981); -const storage_1 = __nccwpck_require__(7577); -const child_process_1 = __nccwpck_require__(2081); +const core = __importStar(__nccwpck_require__(42186)); +const http_client_1 = __nccwpck_require__(96255); +const fs = __importStar(__nccwpck_require__(57147)); +const stream = __importStar(__nccwpck_require__(12781)); +const util = __importStar(__nccwpck_require__(73837)); +const utils = __importStar(__nccwpck_require__(91518)); +const constants_1 = __nccwpck_require__(88840); +const requestUtils_1 = __nccwpck_require__(13981); +const storage_1 = __nccwpck_require__(27577); +const child_process_1 = __nccwpck_require__(32081); /** * Pipes the body of a HTTP response to a stream * @@ -1345,7 +1437,7 @@ exports.getDownloadCommandPipeForWget = getDownloadCommandPipeForWget; /***/ }), -/***/ 3981: +/***/ 13981: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1384,9 +1476,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const http_client_1 = __nccwpck_require__(6255); -const constants_1 = __nccwpck_require__(8840); +const core = __importStar(__nccwpck_require__(42186)); +const http_client_1 = __nccwpck_require__(96255); +const constants_1 = __nccwpck_require__(88840); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; @@ -1490,7 +1582,7 @@ exports.retryHttpClientResponse = retryHttpClientResponse; /***/ }), -/***/ 6490: +/***/ 56490: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1529,13 +1621,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTar = exports.extractStreamingTar = exports.extractTar = exports.listTar = void 0; -const exec_1 = __nccwpck_require__(1514); -const io = __importStar(__nccwpck_require__(7436)); -const fs_1 = __nccwpck_require__(7147); -const path = __importStar(__nccwpck_require__(1017)); -const utils = __importStar(__nccwpck_require__(1518)); -const constants_1 = __nccwpck_require__(8840); -const child_process_1 = __nccwpck_require__(2081); +const exec_1 = __nccwpck_require__(71514); +const io = __importStar(__nccwpck_require__(47351)); +const fs_1 = __nccwpck_require__(57147); +const path = __importStar(__nccwpck_require__(71017)); +const utils = __importStar(__nccwpck_require__(91518)); +const constants_1 = __nccwpck_require__(88840); +const child_process_1 = __nccwpck_require__(32081); const IS_WINDOWS = process.platform === 'win32'; var TAR_MODE; (function (TAR_MODE) { @@ -1966,11 +2058,12 @@ var __importDefault = (this && this.__importDefault) || 
function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.multiPartUploadToGCS = exports.uploadFileToS3 = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const utils = __importStar(__nccwpck_require__(1518)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const axios_1 = __importDefault(__nccwpck_require__(8757)); -const storage_1 = __nccwpck_require__(7577); +const core = __importStar(__nccwpck_require__(42186)); +const utils = __importStar(__nccwpck_require__(91518)); +const os = __importStar(__nccwpck_require__(22037)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const axios_1 = __importDefault(__nccwpck_require__(88757)); +const storage_1 = __nccwpck_require__(27577); function getContentRange(start, end) { // Format: `bytes start-end/filesize // start and end are inclusive @@ -1980,7 +2073,7 @@ function getContentRange(start, end) { return `bytes ${start}-${end}/*`; } function uploadChunk(resourceUrl, openStream, partNumber, start, end) { - var _a; + var _a, _b; return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); // Manually convert the readable stream to a buffer. S3 doesn't allow stream as input @@ -2001,7 +2094,7 @@ function uploadChunk(resourceUrl, openStream, partNumber, start, end) { }; } catch (error) { - throw new Error(`Cache service responded with ${error.status} during upload chunk.`); + throw new Error(`Cache service responded with ${(_b = error.response) === null || _b === void 0 ? void 0 : _b.status} during upload chunk.`); } }); } @@ -2009,26 +2102,39 @@ function uploadFileToS3(preSignedURLs, archivePath) { return __awaiter(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const numberOfChunks = preSignedURLs.length; + let concurrency = 4; + // Adjust concurrency based on the number of cpu cores + if (os.cpus().length > 4) { + concurrency = 8; + } const fd = fs_1.default.openSync(archivePath, 'r'); - core.debug('Awaiting all uploads'); + core.debug(`Awaiting all uploads with concurrency limit of ${concurrency}`); let offset = 0; + const completedParts = []; try { - const completedParts = yield Promise.all(preSignedURLs.map((presignedURL, index) => __awaiter(this, void 0, void 0, function* () { - const chunkSize = Math.ceil(fileSize / numberOfChunks); - const start = offset; - const end = offset + chunkSize - 1; - offset += chunkSize; - return yield uploadChunk(presignedURL, () => fs_1.default - .createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }) - .on('error', error => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), index + 1, start, end); - }))); + for (let i = 0; i < numberOfChunks; i += concurrency) { + const batch = preSignedURLs + .slice(i, i + concurrency) + .map((presignedURL, index) => { + const chunkIndex = i + index; + const chunkSize = Math.ceil(fileSize / numberOfChunks); + const start = offset; + const end = offset + chunkSize - 1; + offset += chunkSize; + return uploadChunk(presignedURL, () => fs_1.default + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), chunkIndex + 1, start, end); + }); + const batchResults = yield Promise.all(batch); + 
completedParts.push(...batchResults); + } return completedParts; } finally { @@ -2061,7 +2167,7 @@ exports.multiPartUploadToGCS = multiPartUploadToGCS; /***/ }), -/***/ 6215: +/***/ 76215: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -2091,7 +2197,7 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadOptions = exports.getUploadOptions = void 0; -const core = __importStar(__nccwpck_require__(2186)); +const core = __importStar(__nccwpck_require__(42186)); /** * Returns a copy of the upload options with defaults filled in. * @@ -2168,47 +2274,80 @@ exports.getDownloadOptions = getDownloadOptions; /***/ }), -/***/ 1597: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 40333: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.create = void 0; -const internal_globber_1 = __nccwpck_require__(7341); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } } -exports.create = create; -//# sourceMappingURL=glob.js.map +exports.Context = Context; +//# sourceMappingURL=context.js.map /***/ }), -/***/ 9350: +/***/ 28294: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -2221,51 +2360,42 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOptions = void 0; -const core = __importStar(__nccwpck_require__(2186)); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(40333)); +const utils_1 = __nccwpck_require__(30849); +exports.context = new Context.Context(); /** - * Returns a copy with defaults filled in. 
+ * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - } - return result; +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map /***/ }), -/***/ 7341: +/***/ 28189: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -2278,7 +2408,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -2291,241 +2421,470 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? 
(this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultGlobber = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const fs = __importStar(__nccwpck_require__(7147)); -const globOptionsHelper = __importStar(__nccwpck_require__(9350)); -const path = __importStar(__nccwpck_require__(1017)); -const patternHelper = __importStar(__nccwpck_require__(5186)); -const internal_match_kind_1 = __nccwpck_require__(836); -const internal_pattern_1 = __nccwpck_require__(5343); -const internal_search_state_1 = __nccwpck_require__(8530); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); - } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } - } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); - } - } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? 
- const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory) { - yield yield __await(item.path); - } - // Descend? - else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); - } - } - }); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(96255)); +const undici_1 = __nccwpck_require__(41773); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 30849: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(40333)); +const Utils = __importStar(__nccwpck_require__(28189)); +// octokit + plugins +const core_1 = __nccwpck_require__(15552); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(16297); +const plugin_paginate_rest_1 = __nccwpck_require__(19655); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); - } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); - } - throw err; - } - } - else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); - } - return stats; - }); +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; } + return opts; } -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 836: -/***/ ((__unused_webpack_module, exports) => { +/***/ 31597: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MatchKind = void 0; +exports.create = void 0; +const internal_globber_1 = __nccwpck_require__(27341); /** - * Indicates whether a pattern matches a path + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map /***/ }), -/***/ 22: +/***/ 99350: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOptions = void 0; +const core = __importStar(__nccwpck_require__(42186)); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), + +/***/ 27341: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultGlobber = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const fs = __importStar(__nccwpck_require__(57147)); +const globOptionsHelper = __importStar(__nccwpck_require__(99350)); +const path = __importStar(__nccwpck_require__(71017)); +const patternHelper = __importStar(__nccwpck_require__(95186)); +const internal_match_kind_1 = __nccwpck_require__(80836); +const internal_pattern_1 = __nccwpck_require__(35343); +const internal_search_state_1 = __nccwpck_require__(28530); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? 
+ else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), + +/***/ 80836: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), + +/***/ 20022: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -2554,8 +2913,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, 
"__esModule", ({ value: true })); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); +const path = __importStar(__nccwpck_require__(71017)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); const IS_WINDOWS = process.platform === 'win32'; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. @@ -2730,7 +3089,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; /***/ }), -/***/ 9413: +/***/ 19413: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -2759,9 +3118,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Path = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(22)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(20022)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments @@ -2850,7 +3209,7 @@ exports.Path = Path; /***/ }), -/***/ 5186: +/***/ 95186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -2876,8 +3235,8 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; -const pathHelper = __importStar(__nccwpck_require__(22)); -const internal_match_kind_1 = __nccwpck_require__(836); +const pathHelper = __importStar(__nccwpck_require__(20022)); +const internal_match_kind_1 = __nccwpck_require__(80836); const IS_WINDOWS = process.platform === 'win32'; /** * Given an array of patterns, returns an array of paths to search. 
@@ -2951,7 +3310,7 @@ exports.partialMatch = partialMatch; /***/ }), -/***/ 5343: +/***/ 35343: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -2980,13 +3339,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Pattern = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(22)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const minimatch_1 = __nccwpck_require__(3973); -const internal_match_kind_1 = __nccwpck_require__(836); -const internal_path_1 = __nccwpck_require__(9413); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(20022)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const minimatch_1 = __nccwpck_require__(83973); +const internal_match_kind_1 = __nccwpck_require__(80836); +const internal_path_1 = __nccwpck_require__(19413); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { @@ -3213,7 +3572,7 @@ exports.Pattern = Pattern; /***/ }), -/***/ 8530: +/***/ 28530: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -3231,20508 +3590,52544 @@ exports.SearchState = SearchState; /***/ }), -/***/ 3771: -/***/ ((module, exports) => { +/***/ 24101: +/***/ ((module) => { -exports = module.exports = SemVer +"use strict"; -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } -} else { - debug = function () {} -} + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? 
"app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 -function tok (n) { - t[n] = R++ -} +/***/ }), -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' +/***/ 15552: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] +"use strict"; -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } - return value -} + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. 
+// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(45030); +var import_before_after_hook = __nccwpck_require__(83682); +var import_request = __nccwpck_require__(77434); +var import_graphql = __nccwpck_require__(80543); +var import_auth_token = __nccwpck_require__(24101); + +// pkg/dist-src/version.js +var VERSION = "5.2.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' +/***/ }), -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. +/***/ 62777: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' +"use strict"; -// ## Main Version -// Three dot-separated numeric identifiers. +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(45030); -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. 
+// pkg/dist-src/version.js +var VERSION = "9.0.5"; -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. 
+// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' +/***/ }), -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' +/***/ 80543: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' +"use strict"; -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. 
-tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(77434); +var import_universal_user_agent = __nccwpck_require__(45030); -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' +// pkg/dist-src/version.js +var VERSION = "7.1.0"; -tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(77434); -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(77434); -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; -// Tilde ranges. 
-// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' +/***/ }), -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' +/***/ 19655: +/***/ ((module) => { -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' +"use strict"; -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' +// pkg/dist-src/version.js +var VERSION = "9.2.1"; -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. 
-tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} -// Star ranges basically just allow anything at all. -tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. 
-for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/organization-roles/{role_id}/teams", + "GET /orgs/{org}/organization-roles/{role_id}/users", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET 
/orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET 
/repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET 
/users/{username}/subscriptions" +]; - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. - safeRe[i] = new RegExp(makeSafeRe(src[i])) +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; } } -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - if (version instanceof SemVer) { - return version - } - if (typeof version !== 'string') { - return null - } +/***/ }), - if (version.length > MAX_LENGTH) { - return null - } +/***/ 16297: +/***/ ((module) => { - var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null - } +"use strict"; - try { - return new SemVer(version, options) - } catch (er) { - return null +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } -} + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? 
s.version : null -} +// pkg/dist-src/version.js +var VERSION = "10.4.1"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + 
reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT 
/repos/{owner}/{repo}/notifications"], + markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + 
updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET 
/user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST 
/user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + 
unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET 
/orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + assignTeamToOrgRole: [ + "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + assignUserToOrgRole: [ + "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteCustomOrganizationRole: [ + "DELETE /orgs/{org}/organization-roles/{role_id}" + ], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET 
/orgs/{org}/memberships/{username}"], + getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], + listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], + listOrgRoles: ["GET /orgs/{org}/organization-roles"], + listOrganizationFineGrainedPermissions: [ + "GET /orgs/{org}/organization-fine-grained-permissions" + ], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + patchCustomOrganizationRole: [ + "PATCH /orgs/{org}/organization-roles/{role_id}" + ], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + revokeAllOrgRolesTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" + ], + revokeAllOrgRolesUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}" + ], + revokeOrgRoleTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + revokeOrgRoleUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + 
updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + 
"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST 
/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: 
"contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + cancelPagesDeployment: [ + "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateCustomPropertiesValues: [ + "PATCH /repos/{owner}/{repo}/properties/values" + ], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: 
["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET 
/repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesDeployment: [ + "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" + ], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET 
/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createFork: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" + ], + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET 
/user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = 
requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 21366: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(58932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 77434: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(62777); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "8.4.0"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(21366); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
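// Illustrative aside (not part of the bundled source): the conditional spread that
// follows only adds the fetch option when a request body is present, e.g.
//   ({ ...("payload" && { duplex: "half" }) })    // -> { duplex: "half" }
//   ({ ...(undefined && { duplex: "half" }) })    // -> {}
// so bodiless requests such as plain GETs are passed to fetch unchanged.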
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; 
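// Illustrative aside (assumed usage, not taken from this bundle): because `defaults`
// is re-bound to withDefaults, request functions can be layered, e.g.
//   const ghes = request.defaults({ baseUrl: "https://ghes.example.com/api/v3" });
//   await ghes("GET /repos/{owner}/{repo}", { owner: "octocat", repo: "hello-world" });
// each .defaults() call merges its options onto the endpoint defaults attached below.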
+ return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 3771: +/***/ ((module, exports) => { + +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. 
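// Illustrative example: in "1.2.3-alpha.7" the pre-release identifiers are
// "alpha" (non-numeric) and "7" (numeric); the loose variant defined below
// additionally tolerates numeric identifiers with leading zeros such as "007".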
+ +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+ + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. 
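// For example, with the replacement limits above,
//   makeSafeRe('\\s*(\\d+)') === '\\s{0,1}(\\d{1,256})'
// i.e. unbounded * and + repetition is rewritten into bounded {min,max}
// repetition before the pattern is compiled into safeRe[i].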
+ safeRe[i] = new RegExp(makeSafeRe(src[i])) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} exports.SemVer = SemVer -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. 
+ // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
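
The helpers bundled above (parse, valid, clean, inc and the compare/cmp family) are the core of the version arithmetic this action relies on. A minimal usage sketch, assuming the module is loaded via the upstream `semver` package name rather than its webpack module id:

// Illustrative only: exercises the helpers defined above, loaded here via the
// upstream package name (an assumption; the bundle resolves it by module id).
const semver = require('semver');

console.log(semver.valid('1.2.3'));                     // '1.2.3'
console.log(semver.valid('not-a-version'));             // null
console.log(semver.clean('  =v1.2.3  '));               // '1.2.3'

console.log(semver.inc('1.2.3', 'patch'));              // '1.2.4'
console.log(semver.inc('1.2.3', 'preminor', 'beta'));   // '1.3.0-beta.0'

console.log(semver.gt('1.10.0', '1.9.0'));              // true (numeric, not lexical)
console.log(semver.cmp('2.0.0', '>=', '2.0.0-rc.1'));   // true: a release outranks its prereleases
console.log(semver.sort(['1.10.0', '1.2.0', '1.9.0'])); // [ '1.2.0', '1.9.0', '1.10.0' ]
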
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
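
Comparator wraps a single operator/version pair; test() checks one version against it and intersects() asks whether two comparators can overlap. A small illustrative sketch (upstream `semver` name assumed):

// Illustrative sketch of the Comparator class above (upstream 'semver' name
// assumed; the bundled copy behaves the same way).
const semver = require('semver');

const atLeast = new semver.Comparator('>=1.2.3');
console.log(atLeast.operator, atLeast.semver.version); // '>=' '1.2.3'
console.log(atLeast.test('1.3.0'));                    // true
console.log(atLeast.test('1.0.0'));                    // false

// intersects(): can a single version satisfy both comparators?
const below = new semver.Comparator('<2.0.0');
console.log(atLeast.intersects(below));                          // true (e.g. 1.5.0)
console.log(below.intersects(new semver.Comparator('>=2.0.0'))); // false
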
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
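
Range splits on `||`, desugars carets, tildes, x-ranges and hyphen ranges into plain comparators, and stores the result as a set of comparator lists. A sketch of that desugaring (upstream `semver` name assumed; the comments show the expected desugared forms):

// Illustrative sketch of Range desugaring (upstream 'semver' name assumed).
const semver = require('semver');

const range = new semver.Range('^1.2.3 || >=3.0.0 <3.5.0');
console.log(range.range); // '>=1.2.3 <2.0.0||>=3.0.0 <3.5.0'

// toComparators() exposes the same comparator sets as plain strings.
console.log(semver.toComparators('1.x || >=2.5.0'));
// [ [ '>=1.0.0', '<2.0.0' ], [ '>=2.5.0' ] ]

console.log(semver.intersects('^1.2.3', '1.2.x'));  // true
console.log(semver.intersects('^1.2.3', '^2.0.0')); // false
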
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(safeRe[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
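
The comment above describes the prerelease gate enforced by testSet(): a prerelease version only matches when some comparator carries a prerelease on the same [major, minor, patch] tuple, unless includePrerelease is set. A sketch of that rule (upstream `semver` name assumed):

// Illustrative sketch of the prerelease gate in testSet() (upstream 'semver'
// name assumed).
const semver = require('semver');

// ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0, so prereleases of 1.2.3 match...
console.log(new semver.Range('^1.2.3-pr.1').test('1.2.3-pr.2'));    // true
// ...but prereleases of any other patch level are rejected by default.
console.log(new semver.Range('^1.2.3-pr.1').test('1.2.4-alpha.1')); // false

// includePrerelease lifts that restriction for the whole range.
const relaxed = new semver.Range('^1.2.3-pr.1', { includePrerelease: true });
console.log(relaxed.test('1.2.4-alpha.1'));                         // true
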
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
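
satisfies, maxSatisfying, minSatisfying, minVersion and validRange are the entry points a consumer such as this action uses to resolve a concrete version from a spec. An illustrative sketch (upstream `semver` name assumed; the version list is invented sample data):

// Illustrative sketch of the range-resolution helpers above (upstream 'semver'
// name assumed; the version list is invented sample data).
const semver = require('semver');

const available = ['16.20.2', '18.19.0', '18.20.3', '20.11.1'];

console.log(semver.satisfies('18.20.3', '^18.0.0'));      // true
console.log(semver.maxSatisfying(available, '18.x'));     // '18.20.3'
console.log(semver.minSatisfying(available, '>=18.0.0')); // '18.19.0'
console.log(semver.minVersion('^18.19.0').version);       // '18.19.0'
console.log(semver.validRange('not a range'));            // null
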
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) +} + + +/***/ }), + +/***/ 94138: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var v1 = __nccwpck_require__(61610); +var v4 = __nccwpck_require__(8373); + +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; + +module.exports = uuid; + + +/***/ }), + +/***/ 65694: +/***/ ((module) => { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 34069: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(6113); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 61610: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(34069); +var bytesToUuid = __nccwpck_require__(65694); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; + + +/***/ }), + +/***/ 8373: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(34069); +var bytesToUuid = __nccwpck_require__(65694); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 87351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const utils_1 = __nccwpck_require__(5278); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 42186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(87351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const oidc_utils_1 = __nccwpck_require__(98041); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
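
exportVariable, setSecret, addPath and getInput defined above all funnel through environment variables and workflow/file commands. A small sketch of the input and variable flow, assuming the bundle is consumed as the upstream `@actions/core` package and using invented INPUT_* values:

// Illustrative sketch only: '@actions/core' is the upstream name of the
// bundled module above, and the INPUT_* values are invented for the demo.
const core = require('@actions/core');

// Inputs arrive as INPUT_<UPPERCASED NAME> environment variables.
process.env['INPUT_NODE-VERSION'] = ' 20.x ';
console.log(core.getInput('node-version'));                           // '20.x' (trimmed)
console.log(core.getInput('node-version', {trimWhitespace: false}));  // ' 20.x '

// exportVariable() updates process.env right away and, when the runner
// provides GITHUB_ENV, also appends a file command so later steps see it.
core.exportVariable('MY_FLAG', true); // non-strings are JSON.stringify'd
console.log(process.env.MY_FLAG);     // 'true'
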
+ * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
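
getBooleanInput enforces the YAML 1.2 core-schema booleans, getMultilineInput splits and trims line-separated values, and setOutput prefers the GITHUB_OUTPUT file command with a fallback to the legacy ::set-output command. A sketch under the same assumptions (package name and input names are illustrative):

// Illustrative sketch only (package and input names assumed for the demo).
const core = require('@actions/core');

process.env['INPUT_CHECK-LATEST'] = 'True';
console.log(core.getBooleanInput('check-latest')); // true ('true'/'True'/'TRUE' accepted)

process.env['INPUT_CACHE-DEPENDENCY-PATH'] = 'package-lock.json\npnpm-lock.yaml\n';
console.log(core.getMultilineInput('cache-dependency-path'));
// [ 'package-lock.json', 'pnpm-lock.yaml' ]  (empty lines dropped, values trimmed)

// Writes to the GITHUB_OUTPUT file when the runner provides it, otherwise
// falls back to the legacy ::set-output workflow command on stdout.
core.setOutput('node-version', '20.11.1');
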
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
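
The logging helpers above wrap workflow commands: error/warning/notice accept annotation properties that toCommandProperties maps onto key=value pairs, and startGroup/endGroup make a span of log lines foldable. An illustrative sketch (upstream `@actions/core` name assumed; the file path and messages are invented):

// Illustrative sketch only (upstream '@actions/core' name assumed).
const core = require('@actions/core');

core.info('resolving node version...');

// Annotation properties are mapped by toCommandProperties() onto key=value
// pairs of the ::warning workflow command (startLine becomes line, etc.).
core.warning('falling back to the cached toolchain', {
  title: 'Cache fallback',
  file: 'src/installer.ts',
  startLine: 42
});
// emits: ::warning title=Cache fallback,file=src/installer.ts,line=42::falling back to the cached toolchain

// Everything logged between startGroup and endGroup is foldable in the job log.
core.startGroup('npm install');
core.info('installing dependencies');
core.endGroup();
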
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(81327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(81327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(57147)); +const os = __importStar(__nccwpck_require__(22037)); +const uuid_1 = __nccwpck_require__(78974); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 98041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(96255); +const auth_1 = __nccwpck_require__(35526); +const core_1 = __nccwpck_require__(42186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(71017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 81327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(22037); +const fs_1 = __nccwpck_require__(57147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 78974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + 
get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(81595)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(26993)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(51472)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(16217)); + +var _nil = _interopRequireDefault(__nccwpck_require__(32381)); + +var _version = _interopRequireDefault(__nccwpck_require__(40427)); + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(26385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 32381: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 26385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 86230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 38844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 61458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 81595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 26993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(65920)); + +var _md = _interopRequireDefault(__nccwpck_require__(5842)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 65920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(26385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 51472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 16217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(65920)); + +var _sha = _interopRequireDefault(__nccwpck_require__(38844)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 92609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(86230)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 40427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 71514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(71576); +const tr = __importStar(__nccwpck_require__(88159)); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. 
See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 88159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const events = __importStar(__nccwpck_require__(82361)); +const child = __importStar(__nccwpck_require__(32081)); +const path = __importStar(__nccwpck_require__(71017)); +const io = __importStar(__nccwpck_require__(47351)); +const ioUtil = __importStar(__nccwpck_require__(81962)); +const timers_1 = __nccwpck_require__(39512); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? 
a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. 
this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 28090: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = exports.create = void 0; +const internal_globber_1 = __nccwpck_require__(28298); +const internal_hash_files_1 = __nccwpck_require__(2448); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +/** + * Computes the sha256 hash of a glob + * + * @param patterns Patterns separated by newlines + * @param currentWorkspace Workspace used when matching files + * @param options Glob options + * @param verbose Enables verbose logging + */ +function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { + return __awaiter(this, void 0, void 0, function* () { + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === 'boolean') { + followSymbolicLinks = options.followSymbolicLinks; + } + const globber = yield create(patterns, { followSymbolicLinks }); + return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose); + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=glob.js.map + +/***/ }), + +/***/ 51026: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOptions = void 0; +const core = __importStar(__nccwpck_require__(42186)); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.matchDirectories === 'boolean') { + result.matchDirectories = copy.matchDirectories; + core.debug(`matchDirectories '${result.matchDirectories}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), + +/***/ 28298: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultGlobber = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const fs = __importStar(__nccwpck_require__(57147)); +const globOptionsHelper = __importStar(__nccwpck_require__(51026)); +const path = __importStar(__nccwpck_require__(71017)); +const patternHelper = __importStar(__nccwpck_require__(29005)); +const internal_match_kind_1 = __nccwpck_require__(81063); +const internal_pattern_1 = __nccwpck_require__(64536); +const internal_search_state_1 = __nccwpck_require__(89117); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? 
+ const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), + +/***/ 2448: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); +const core = __importStar(__nccwpck_require__(42186)); +const fs = __importStar(__nccwpck_require__(57147)); +const stream = __importStar(__nccwpck_require__(12781)); +const util = __importStar(__nccwpck_require__(73837)); +const path = __importStar(__nccwpck_require__(71017)); +function hashFiles(globber, currentWorkspace, verbose = false) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const writeDelegate = verbose ? core.info : core.debug; + let hasMatch = false; + const githubWorkspace = currentWorkspace + ? currentWorkspace + : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const result = crypto.createHash('sha256'); + let count = 0; + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + writeDelegate(file); + if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { + writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; + } + if (fs.statSync(file).isDirectory()) { + writeDelegate(`Skip directory '${file}'.`); + continue; + } + const hash = crypto.createHash('sha256'); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs.createReadStream(file), hash); + result.write(hash.digest()); + count++; + if (!hasMatch) { + hasMatch = true; + } + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + result.end(); + if (hasMatch) { + writeDelegate(`Found ${count} files to hash.`); + return result.digest('hex'); + } + else { + writeDelegate(`No matches found for glob`); + return ''; + } + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=internal-hash-files.js.map + +/***/ }), + +/***/ 81063: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), + +/***/ 1849: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(__nccwpck_require__(71017)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. 
C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; +} +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. 
C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map + +/***/ }), + +/***/ 96836: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Path = void 0; +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + 
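+                    // Illustrative note (editorial addition, not from the bundled source): for
+                    // new Path(['/', 'foo', 'bar']) this branch pushes 'foo' and 'bar', producing
+                    // the same segments as the string overload new Path('/foo/bar').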
this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map + +/***/ }), + +/***/ 29005: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(__nccwpck_require__(1849)); +const internal_match_kind_1 = __nccwpck_require__(81063); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map + +/***/ }), + +/***/ 64536: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Pattern = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const minimatch_1 = __nccwpck_require__(83973); +const internal_match_kind_1 = __nccwpck_require__(81063); +const internal_path_1 = __nccwpck_require__(96836); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map + +/***/ }), + +/***/ 89117: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map + +/***/ }), + +/***/ 35526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 96255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +const pm = __importStar(__nccwpck_require__(19835)); +const tunnel = __importStar(__nccwpck_require__(74294)); +const undici_1 = __nccwpck_require__(41773); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + 
Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + 
options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, 
MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. 
+ * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
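+                // Illustrative note (editorial addition, not from the bundled source): for the
+                // *Json helpers this means a 404 resolves rather than rejects (getJson documents
+                // not-found as returning a null result), while any other status above 299 rejects
+                // with an HttpClientError that carries statusCode and result.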
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 19835: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 81962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(57147)); +const path = __importStar(__nccwpck_require__(71017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. 
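The `exists()` helper above follows the usual stat-and-catch-ENOENT pattern. A self-contained sketch of the same idea with `fs.promises` (the example path is arbitrary):

const fs = require('fs');

// Resolves true if the path exists, false only when stat fails with ENOENT;
// any other error (e.g. EACCES) is unexpected and re-thrown.
async function exists(fsPath) {
  try {
    await fs.promises.stat(fsPath);
    return true;
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false;
    }
    throw err;
  }
}

exists('/tmp/some-file').then(found => console.log(found));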
+ * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 47351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(39491); +const path = __importStar(__nccwpck_require__(71017)); +const ioUtil = __importStar(__nccwpck_require__(81962)); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
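`cp()` above delegates its option handling to `readCopyOptions()` (defined further down): `force` and `copySourceDirectory` default to true unless explicitly disabled, while `recursive` must be opted into. A small sketch of that defaulting pattern, with invented option values:

// Only an explicit `false` disables force/copySourceDirectory; recursive is opt-in.
function readCopyOptions(options = {}) {
  const force = options.force == null ? true : options.force;
  const recursive = Boolean(options.recursive);
  const copySourceDirectory =
    options.copySourceDirectory == null ? true : Boolean(options.copySourceDirectory);
  return { force, recursive, copySourceDirectory };
}

console.log(readCopyOptions({}));                  // { force: true, recursive: false, copySourceDirectory: true }
console.log(readCopyOptions({ force: false }));    // force disabled
console.log(readCopyOptions({ recursive: true })); // recursive enabled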
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. 
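`which()` and `findInPath()` above resolve a tool by walking the `PATH` entries (plus `PATHEXT` candidates on Windows). A condensed sketch of that lookup loop, deliberately skipping the extension and executable-bit checks done by `tryGetExecutablePath`:

const fs = require('fs');
const path = require('path');

// Returns the first PATH entry containing `tool`, or '' if none is found.
function findFirstInPath(tool) {
  const directories = (process.env.PATH || '').split(path.delimiter).filter(Boolean);
  for (const directory of directories) {
    const candidate = path.join(directory, tool);
    if (fs.existsSync(candidate)) {
      return candidate;
    }
  }
  return '';
}

console.log(findFirstInPath(process.platform === 'win32' ? 'node.exe' : 'node'));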
+ * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 4654: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const events_1 = __nccwpck_require__(82361); +const agent_base_1 = __nccwpck_require__(70694); +const url_1 = __nccwpck_require__(57310); +const debug = (0, debug_1.default)('http-proxy-agent'); +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + super.addRequest(req, opts); + } + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? 'https:' : 'http:'; + const hostname = req.getHeader('host') || 'localhost'; + const base = `${protocol}//${hostname}`; + const url = new url_1.URL(req.path, base); + if (opts.port !== 80) { + url.port = String(opts.port); + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = String(url); + // Inject the `Proxy-Authorization` header if necessary. + const headers = typeof this.proxyHeaders === 'function' + ? 
this.proxyHeaders() + : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } + } + } + async connect(req, opts) { + req._header = null; + if (!req.path.includes('://')) { + this.setRequestProps(req, opts); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + // Create a socket connection to the proxy server. + let socket; + if (this.proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(this.connectOpts); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + await (0, events_1.once)(socket, 'connect'); + return socket; + } +} +HttpProxyAgent.protocols = ['http', 'https']; +exports.HttpProxyAgent = HttpProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 84100: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var coreRestPipeline = __nccwpck_require__(29146); +var tslib = __nccwpck_require__(4351); +var coreAuth = __nccwpck_require__(98834); +var coreUtil = __nccwpck_require__(80637); +var coreHttpCompat = __nccwpck_require__(25083); +var coreClient = __nccwpck_require__(7611); +var coreXml = __nccwpck_require__(17309); +var logger$1 = __nccwpck_require__(89497); +var abortController = __nccwpck_require__(1753); +var crypto = __nccwpck_require__(6113); +var coreTracing = __nccwpck_require__(19363); +var stream = __nccwpck_require__(12781); +var coreLro = __nccwpck_require__(90334); +var events = __nccwpck_require__(82361); +var fs = __nccwpck_require__(57147); +var util = __nccwpck_require__(73837); +var buffer = __nccwpck_require__(14300); + +function _interopNamespaceDefault(e) { + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? 
d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n.default = e; + return Object.freeze(n); +} + +var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat); +var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient); +var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs); +var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const SDK_VERSION = "12.26.0"; +const SERVICE_VERSION = "2025-01-05"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms +/** + * The OAuth scope to use with Azure Storage. 
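For context on the block blob constants above: with at most 50,000 committed blocks of at most 4000 MiB each, the largest block blob works out to roughly 190.7 TiB. A back-of-the-envelope check derived only from those constants:

const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000 MiB per block
const BLOCK_BLOB_MAX_BLOCKS = 50000;

const maxBlobBytes = BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS;
console.log((maxBlobBytes / 1024 ** 4).toFixed(1), 'TiB'); // ~190.7 TiB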
+ */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + 
"x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. 
+ * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. 
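Strategy two boils down to the `escape()` helper defined further below: run `encodeURIComponent`, then undo the encoding of path separators and of any `%` that was already part of an escape sequence. A standalone sketch using the two sample URLs from the discussion above:

// Mirrors the escape() helper used by escapeURLPath(): keeps "/" readable and
// avoids double-encoding inputs that were already percent-encoded.
function escapePath(text) {
  return encodeURIComponent(text)
    .replace(/%2F/g, '/')   // don't escape "/"
    .replace(/'/g, '%27')   // escape "'"
    .replace(/\+/g, '%20')
    .replace(/%25/g, '%');  // revert already-encoded "%"
}

console.log(escapePath('/con/b:'));   // "/con/b%3A"
console.log(escapePath('/con/b%3A')); // "/con/b%3A"  (not double-encoded)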
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path || "/"; + path = escape(path); + urlParsed.pathname = path; + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. + */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + // client constructors assume accountSas does *not* start with ? + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.pathname = path; + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : undefined; + // mutating searchParams will change the encoding, so we have to do this ourselves + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } + } + } + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); + } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); +} +/** + * Get URL parameter by name. 
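`appendToURLPath()` above is careful not to introduce a duplicate '/' when the existing path already ends with one. A short sketch of the same joining rule (the URLs are examples only):

// Only add "/" between path and name when it is missing.
function appendToURLPath(url, name) {
  const urlParsed = new URL(url);
  const path = urlParsed.pathname || '/';
  urlParsed.pathname = path.endsWith('/') ? `${path}${name}` : `${path}/${name}`;
  return urlParsed.toString();
}

console.log(appendToURLPath('https://account.blob.core.windows.net/container/', 'blob.txt'));
// https://account.blob.core.windows.net/container/blob.txt
console.log(appendToURLPath('https://account.blob.core.windows.net/container', 'blob.txt'));
// https://account.blob.core.windows.net/container/blob.txt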
+ * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + var _a; + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } + catch (e) { + return undefined; + } +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } + catch (e) { + return undefined; + } +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. 
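`truncatedISO8061Date()` above pads JavaScript's millisecond-precision `toISOString()` output to seven fractional digits, or drops the fraction entirely. A quick illustration with an arbitrary timestamp:

// Same string surgery as truncatedISO8061Date(): pad to 7 fractional digits,
// or cut the ".fffZ" suffix off completely.
function truncatedISO8061Date(date, withMilliseconds = true) {
  const dateString = date.toISOString(); // e.g. "2018-10-29T06:34:36.139Z"
  return withMilliseconds
    ? dateString.substring(0, dateString.length - 1) + '0000' + 'Z'
    : dateString.substring(0, dateString.length - 5) + 'Z';
}

const d = new Date('2018-10-29T06:34:36.139Z');
console.log(truncatedISO8061Date(d));        // 2018-10-29T06:34:36.1390000Z
console.log(truncatedISO8061Date(d, false)); // 2018-10-29T06:34:36Z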
+ * + * @param content - + */ +function base64encode(content) { + return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +/** + * If two strings are equal when compared case insensitive. + * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.hostname.split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.pathname.split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". 
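`generateBlockID()` above builds fixed-width block IDs: the prefix plus a zero-padded index is forced to exactly 48 characters, which base64-encodes to a 64-character ID. A reduced sketch with an invented prefix:

// 48 source bytes always base64-encode to 64 characters (48 / 3 * 4).
function generateBlockID(blockIDPrefix, blockIndex) {
  const maxSourceStringLength = 48;
  const maxBlockIndexLength = 6; // up to 100,000 uncommitted blocks
  const prefix = blockIDPrefix.slice(0, maxSourceStringLength - maxBlockIndexLength);
  const source = prefix + blockIndex.toString().padStart(maxSourceStringLength - prefix.length, '0');
  return Buffer.from(source).toString('base64');
}

const id = generateBlockID('upload-123e4567-', 42);
console.log(id.length); // 64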
+ accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. + * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. 
+ return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +/** + * Escape the blobName but keep path separator ('/'). + */ +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); +} +/** + * A typesafe helper for ensuring that a given response object has + * the original _response attached. 
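`EscapePath()` above encodes each path segment individually so that the '/' separators survive. A standalone sketch (the blob name is an example):

// Encode every segment but keep the "/" separators intact.
function escapeBlobPath(blobName) {
  return blobName
    .split('/')
    .map(segment => encodeURIComponent(segment))
    .join('/');
}

console.log(escapeBlobPath('folder one/report 2024?.csv'));
// folder%20one/report%202024%3F.csv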
+ * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property + */ +function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS$1 = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS$1.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS$1.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. 
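The `StorageRetryPolicy` constructor above normalises user-supplied retry options: missing or out-of-range values fall back to the defaults, `maxTries` is floored to an integer, and `retryDelayInMs` is clamped to `maxRetryDelayInMs`. A reduced sketch of that normalisation (the option values are invented):

const DEFAULTS = { maxTries: 4, retryDelayInMs: 4000, maxRetryDelayInMs: 120000 };

// Keep only sane values; anything missing or out of range falls back to DEFAULTS.
function normalizeRetryOptions(retryOptions = {}) {
  const maxRetryDelayInMs =
    retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULTS.maxRetryDelayInMs;
  return {
    maxTries: retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULTS.maxTries,
    retryDelayInMs:
      retryOptions.retryDelayInMs >= 0
        ? Math.min(retryOptions.retryDelayInMs, maxRetryDelayInMs)
        : DEFAULTS.retryDelayInMs,
    maxRetryDelayInMs,
  };
}

console.log(normalizeRetryOptions({ maxTries: 2.9, retryDelayInMs: 500000 }));
// { maxTries: 2, retryDelayInMs: 120000, maxRetryDelayInMs: 120000 }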
+ */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. 
+ // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/* + * We need to imitate .Net culture-aware sorting, which is used in storage service. + * Below tables contain sort-keys for en-US culture. 
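+ *
+ * table_lv0 holds the primary per-character sort weight for each ASCII code point; table_lv2 and
+ * table_lv4 are consulted only to break ties for a handful of characters. isLessThan walks both
+ * (lowercased) header names level by level, skipping characters whose weight is 0 at the current
+ * level, so canonicalized x-ms-* headers end up in the same order the service expects.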
+ */ +const table_lv0 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, + 0x723, 0x725, 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, + 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe02, 0xe09, 0xe0a, + 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, + 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, + 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, + 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, + 0x0, 0x750, 0x0, +]); +const table_lv2 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +const table_lv4 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; +} +function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; + } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 0x1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 0x1; + if (weight1 === 0x1 && weight2 === 0x1) { + i = 0; + j = 0; + ++curr_level; + } + else if (weight1 === weight2) { + ++i; + ++j; + } + else if (weight1 === 0) { + ++i; + } + else if (weight2 === 0) { + ++j; + } + else { + return weight1 < weight2; + } + } + return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. + * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
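+ *
+ * Illustrative example (made-up header values): the request headers
+ *   X-Ms-Date: Tue, 01 Jan 2030 00:00:00 GMT
+ *   x-ms-client-request-id: 1234
+ * canonicalize to the string
+ *   "x-ms-client-request-id:1234\nx-ms-date:Tue, 01 Jan 2030 00:00:00 GMT\n"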
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +let _defaultHttpClient; +function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); + } + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the StorageBrowserPolicy. + */ +const storageBrowserPolicyName = "storageBrowserPolicy"; +/** + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. + */ +function storageBrowserPolicy() { + return { + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (coreUtil.isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Name of the {@link storageRetryPolicy} + */ +const storageRetryPolicyName = "storageRetryPolicy"; +/** + * RetryPolicy types. + */ +var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. 
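+ *
+ * With the defaults above (EXPONENTIAL, maxTries 4, retryDelayInMs 4s, maxRetryDelayInMs 120s),
+ * primary retries are delayed by min((2^(attempt - 1) - 1) * 4s, 120s), i.e. roughly 0s, 4s and 12s
+ * after attempts 1-3; retries against the secondary endpoint instead wait a random delay of up to 1s.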
+ */ +function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. 
+ // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (e) { + if (coreRestPipeline.isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the storageSharedKeyCredentialPolicy. + */ +const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; +/** + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
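+ *
+ * The string-to-sign is built from the HTTP verb, a fixed list of standard headers (Content-Length,
+ * Content-MD5, If-Match, Range, ...), the canonicalized x-ms-* headers and the canonicalized resource
+ * (account name, URL path and sorted query parameters). It is signed with HMAC-SHA256 using the
+ * base64-decoded account key and attached as `Authorization: SharedKey <accountName>:<signature>`.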
+ */ +function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = crypto.createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
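+ *
+ * Note: after sorting, duplicate header names are dropped (only the first occurrence is kept) and
+ * whitespace around the ':' is trimmed (header name on the right, value on the left), per steps 3
+ * and 5 above.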
+ * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreUtil.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. 
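+ *
+ * Note: processDownlevelPipeline (further below) treats this legacy factory as a known one, so it is
+ * not re-wrapped when an older Pipeline is converted to the core-rest-pipeline.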
+ */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the storageCorrectContentLengthPolicy. + */ +const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; +/** + * storageCorrectContentLengthPolicy to correctly set Content-Length header with request body length. + */ +function storageCorrectContentLengthPolicy() { + function correctContentLength(request) { + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + } + return { + name: storageCorrectContentLengthPolicyName, + async sendRequest(request, next) { + correctContentLength(request); + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. 
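+ *
+ * Illustrative usage (placeholder account name and key):
+ *
+ *   const credential = new StorageSharedKeyCredential("myaccount", "<base64-account-key>");
+ *   const pipeline = newPipeline(credential, { retryOptions: { maxTries: 3 } });
+ *   // the resulting Pipeline can then be handed to a client constructor such as BlobServiceClient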
+ */ +function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential(); + } + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; +} +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + afterRetry: hasInjector, + }; + } + } + return undefined; +} +function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML: coreXml.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); + corePipeline.addPolicy(storageCorrectContentLengthPolicy()); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (coreAuth.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; + } + } + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; + } + return factory.constructor.name === "StorageSharedKeyCredential"; +} +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; + } + return factory.constructor.name === "AnonymousCredential"; +} +function isCoreHttpBearerTokenFactory(factory) { + return coreAuth.isTokenCredential(factory.credential); +} +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageBrowserPolicyFactory"; +} +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageRetryPolicyFactory"; +} +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; +} +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, + }; + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); + }); +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
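+ *
+ * The mapper objects below drive XML serialization/deserialization for the generated operations:
+ * serializedName/xmlName give the XML element (or attribute) name, and `type` describes the model
+ * shape (Composite, Sequence, Enum, Dictionary, ...).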
+ */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging", + }, + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule", + }, + }, + }, + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String", + }, + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + }, + }, + }, + }, +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String", + }, + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean", + }, + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean", + }, + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, +}; +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + days: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number", + }, + }, + }, + }, +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, +}; +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String", + }, + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: 
"AllowedMethods", + type: { + name: "String", + }, + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String", + }, + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String", + }, + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number", + }, + }, + }, + }, +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String", + }, + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String", + }, + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String", + }, + }, + }, + }, +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String", + }, + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String", + }, + }, + authenticationErrorDetail: { + serializedName: "AuthenticationErrorDetail", + xmlName: "AuthenticationErrorDetail", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + }, + }, + }, + }, +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"], + }, + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem", + }, + }, + }, + }, 
+ continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean", + }, + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + }, + }, +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String", + }, + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String", + }, + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean", + }, + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123", + }, + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String", + }, + }, + }, + }, +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: 
"UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String", + }, + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String", + }, + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String", + }, + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String", + }, + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String", + }, + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, + }, + }, +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String", + }, + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String", + }, + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + }, + }, + }, + }, +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag", + }, + }, + }, + }, + }, + }, +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, + }, + }, +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String", + }, + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: 
"Composite", + className: "AccessPolicy", + }, + }, + }, + }, +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String", + }, + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String", + }, + }, + }, + }, +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, +}; +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName", + }, + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean", + }, + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean", + }, + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + }, + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: 
"HasVersionsOnly", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean", + }, + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123", + }, + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String", + }, + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String", + }, + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray", + }, + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String", + }, + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String", + }, + }, + serverEncrypted: { + serializedName: 
"ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean", + }, + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean", + }, + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String", + }, + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123", + }, + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number", + }, + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean", + }, + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold", + ], + }, + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String", + }, + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123", + }, + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number", + }, + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + 
name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String", + }, + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix", + }, + }, + }, + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName", + }, + }, + }, + }, +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + }, + }, +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, + }, + }, + }, +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number", + }, + }, + }, + }, +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: 
"PageRange", + }, + }, + }, + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number", + }, + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number", + }, + }, + }, + }, +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number", + }, + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number", + }, + }, + }, + }, +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String", + }, + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String", + }, + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization", + }, + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization", + }, + }, + }, + }, +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat", + }, + }, + }, + }, +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"], + }, + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + }, + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + }, + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + }, + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "Dictionary", + value: { type: { name: "any" } }, + }, + }, + }, + }, +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: 
{ + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String", + }, + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String", + }, + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String", + }, + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String", + }, + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String", + }, + }, + }, + }, +}; +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField", + }, + }, + }, + }, + }, + }, +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String", + }, + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String", + }, + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number", + }, + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number", + }, + }, + }, + }, +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: 
"String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const 
ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: 
"String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + 
lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + 
name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + 
serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String", + }, + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + 
xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + 
legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + }, + }, +}; +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String", + }, + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean", + }, + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String", + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + 
contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String", + }, + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean", + }, + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String", + }, + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean", + }, + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number", + }, + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: 
"x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + 
className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: 
"x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + }, + }, +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + 
}, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, 
+ }, + }, +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + 
serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + 
type: { + name: "String", + }, + }, + }, + }, +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + 
type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: 
{ + name: "String", + }, + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + }, + }, +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + 
}, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + 
}, + }, + }, +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesFromURLExceptionHeaders 
= { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + 
serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + 
serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + 
serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: 
"x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: 
"String", + }, + }, + }, + }, +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: 
"x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + 
serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + AccessPolicy: AccessPolicy, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + ArrowConfiguration: 
ArrowConfiguration, + ArrowField: ArrowField, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobFlatListSegment: BlobFlatListSegment, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPrefix: BlobPrefix, + BlobPropertiesInternal: BlobPropertiesInternal, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobServiceProperties: BlobServiceProperties, + BlobServiceStatistics: BlobServiceStatistics, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobTag: BlobTag, + BlobTags: BlobTags, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + Block: Block, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: 
BlockBlobGetBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockList: BlockList, + BlockLookupList: BlockLookupList, + ClearRange: ClearRange, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerItem: ContainerItem, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerProperties: ContainerProperties, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + CorsRule: CorsRule, + DelimitedTextConfiguration: DelimitedTextConfiguration, + FilterBlobItem: FilterBlobItem, + FilterBlobSegment: FilterBlobSegment, + GeoReplication: 
GeoReplication, + JsonTextConfiguration: JsonTextConfiguration, + KeyInfo: KeyInfo, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + ListContainersSegmentResponse: ListContainersSegmentResponse, + Logging: Logging, + Metrics: Metrics, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageList: PageList, + PageRange: PageRange, + QueryFormat: QueryFormat, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + RetentionPolicy: RetentionPolicy, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + SignedIdentifier: SignedIdentifier, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + UserDelegationKey: UserDelegationKey +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties, +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String", + }, + }, + skipEncoding: true, +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number", + }, + }, +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2025-01-05", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String", + }, + }, +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String", + }, + }, +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String", + }, + }, +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number", + }, + }, +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"], + }, + }, + }, + }, + collectionFormat: "CSV", +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo, +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: 
"body", + required: true, + xmlName: "body", + type: { + name: "Stream", + }, + }, +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number", + }, + }, +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String", + }, + }, +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope", + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride", + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: 
"SignedIdentifier", + }, + }, + }, + }, +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String", + }, + }, +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String", + }, + }, +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String", + }, + }, +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String", + }, + }, +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number", + }, + }, +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String", + }, + }, +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number", + }, + }, +}; +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String", + }, + }, +}; +const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: 
"ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions", + ], + }, + }, + }, + }, + collectionFormat: "CSV", +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String", + }, + }, +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String", + }, + }, +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String", + }, + }, +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String", + }, + }, +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean", + }, + }, +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean", + }, + }, +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String", + }, + }, +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String", + }, + }, +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String", + }, + }, +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String", + }, + }, +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String", + }, + }, +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"], + }, + }, +}; +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String", + }, + }, +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: 
"String", + }, + }, +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String", + }, + }, +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String", + }, + }, +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String", + }, + }, +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String", + }, + }, +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String", + }, + }, +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String", + }, + }, +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, +}; +const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + 
mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince", + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince", + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String", + }, + }, +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch", + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String", + }, + }, +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String", + }, + }, +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String", + }, + }, +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean", + }, + }, +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String", + }, + }, +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray", + }, + }, +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String", + }, + }, +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"], + }, + }, +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String", + }, + }, +}; +const 
copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String", + }, + }, +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest, +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags, +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray", + }, + }, +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream", + }, + }, +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", + }, + }, +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number", + }, + }, +}; +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", 
+ "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number", + }, + }, +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number", + }, + }, +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", + }, + }, +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String", + }, + }, +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray", + }, + }, +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String", + }, + }, +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String", + }, + }, +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String", + }, + }, +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"], + }, + }, +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number", + }, + }, +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition", + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number", + }, + }, +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: 
"String", + }, + }, +}; +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean", + }, + }, +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String", + }, + }, +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList, +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"], + }, + }, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Service operations. */ +class ServiceImpl { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. 
+ * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders, + }, + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders, + }, + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, 
+ maxPageSize, + include, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, + }, + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ServiceSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Container operations. */ +class ContainerImpl { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. 
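The Service-level operations defined above (getUserDelegationKey, getAccountInfo, submitBatch, filterBlobs) are generated plumbing; code that consumes the bundled @azure/storage-blob package normally reaches them through the public BlobServiceClient rather than by instantiating ServiceImpl directly. A minimal sketch, assuming a storage connection string is available in a hypothetical AZURE_STORAGE_CONNECTION_STRING environment variable:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function serviceLevelExamples(): Promise<void> {
  // Assumed environment variable; any valid connection string works here.
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );

  // Surfaces the generated getAccountInfo operation (sku name and account kind).
  const account = await service.getAccountInfo();
  console.log(account.skuName, account.accountKind);

  // Surfaces the generated filterBlobs operation: list blobs across containers by tag.
  for await (const blob of service.findBlobsByTags("project='setup-node'")) {
    console.log(blob.containerName, blob.name);
  }
}
```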
+ */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
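The submitBatch operation documented above (on both the service and the container) folds several sub-requests into one multipart/mixed HTTP request; in the public API this is typically driven through BlobBatchClient. A minimal sketch, assuming an authenticated BlobServiceClient and a hypothetical container name:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

// Assumes `service` is already authenticated and the named blobs exist.
async function batchDelete(service: BlobServiceClient, blobNames: string[]): Promise<void> {
  const container = service.getContainerClient("my-container"); // hypothetical name
  const batchClient = service.getBlobBatchClient();

  const batch = batchClient.createBatch();
  for (const name of blobNames) {
    // Each deleteBlob call becomes one sub-request inside the multipart body.
    await batch.deleteBlob(container.getBlobClient(name));
  }

  // One HTTP round trip carries all sub-requests (the generated submitBatch operation).
  const response = await batchClient.submitBatch(batch);
  console.log(`sub-responses: ${response.subResponses.length}`);
}
```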
+ */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
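The container lease operations above (acquire, release, renew, break, change) implement the 15-to-60-second (or infinite) delete lock described in their comments; the usual public counterpart is BlobLeaseClient. A minimal sketch, assuming an existing ContainerClient:

```ts
import { ContainerClient } from "@azure/storage-blob";

// Assumes `container` points at an existing container the caller may lease.
async function withContainerLease(container: ContainerClient): Promise<void> {
  const leaseClient = container.getBlobLeaseClient();

  // Duration must be 15-60 seconds, or -1 for an infinite lease.
  const lease = await leaseClient.acquireLease(30);
  try {
    console.log(`holding lease ${lease.leaseId}`);
    // ... the container cannot be deleted while the lease is held ...
  } finally {
    await leaseClient.releaseLease();
  }
}
```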
+ */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" }, + }, + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + }, + headersMapper: ContainerGetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders, + }, + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + 
contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ContainerSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2, + ], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: 
ContainerRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Blob operations. */ +class BlobImpl { + /** + * Initialize a new instance of the class Blob class. 
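listBlobFlatSegmentOperationSpec and listBlobHierarchySegmentOperationSpec above back the segment-by-segment listing calls; the public ContainerClient hides the continuation-marker and maxPageSize handling behind async iterators. A minimal sketch, assuming an existing container and a hypothetical "logs/" prefix:

```ts
import { ContainerClient } from "@azure/storage-blob";

async function listExamples(container: ContainerClient): Promise<void> {
  // Flat listing: every matching blob, paging handled by the iterator.
  for await (const blob of container.listBlobsFlat({ prefix: "logs/" })) {
    console.log("blob:", blob.name);
  }

  // Hierarchical listing: "/" as the delimiter also yields virtual-directory prefixes.
  for await (const item of container.listBlobsByHierarchy("/", { prefix: "logs/" })) {
    console.log(item.kind === "prefix" ? `dir: ${item.name}` : `blob: ${item.name}`);
  }
}
```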
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. 
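The download, getProperties, delete and undelete operations documented in this block map onto BlobClient in the public API; the soft-delete behaviour described for delete is what undelete and the includeDeleted listing option are meant for. A minimal sketch, assuming an existing container and blob name:

```ts
import { ContainerClient } from "@azure/storage-blob";

async function blobLifecycle(container: ContainerClient, blobName: string): Promise<void> {
  const blob = container.getBlobClient(blobName);

  // Read metadata and standard HTTP properties without downloading the body.
  const props = await blob.getProperties();
  console.log(props.contentLength, props.lastModified);

  // With soft delete enabled on the account, the data is retained for the configured period.
  await blob.delete({ deleteSnapshots: "include" });

  // Soft-deleted blobs appear when listing with includeDeleted and can be restored.
  for await (const item of container.listBlobsFlat({ includeDeleted: true })) {
    if (item.deleted) {
      await container.getBlobClient(item.name).undelete();
    }
  }
}
```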
+ */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. 
The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
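setTier, getTags and setTags above correspond to BlobClient.setAccessTier, getTags and setTags in the public API; tags are also what the Filter Blobs operation searches over. A minimal sketch, assuming an existing blob and illustrative tag values:

```ts
import { BlobClient } from "@azure/storage-blob";

async function tagAndTier(blob: BlobClient): Promise<void> {
  // Tags are simple string key/value pairs, queryable via findBlobsByTags.
  await blob.setTags({ project: "setup-node", keep: "true" });

  const { tags } = await blob.getTags();
  console.log(tags);

  // Moves a block blob between Hot/Cool/Archive (the generated setTier operation).
  await blob.setAccessTier("Cool");
}
```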
+ */ + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetHttpHeadersExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + 
urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp12, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp12, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp13, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + 
ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + 
urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders, + }, + 202: { + headersMapper: BlobSetTierHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders, + }, + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3, +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const 
setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTagsExceptionHeaders, + }, + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing PageBlob operations. */ +class PageBlobImpl { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. 
+ * @param options The options parameters. + */ + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
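The PageBlob operations documented above (create, uploadPages, getPageRanges, resize) are exposed through PageBlobClient; pages are written in 512-byte-aligned ranges against a pre-declared maximum size. A minimal sketch, assuming an existing container and a hypothetical blob name:

```ts
import { ContainerClient } from "@azure/storage-blob";

async function pageBlobExample(container: ContainerClient): Promise<void> {
  const pageBlob = container.getPageBlobClient("disk.vhd"); // hypothetical name

  // The declared size and every written range must be 512-byte aligned.
  await pageBlob.create(1024 * 1024);

  const page = Buffer.alloc(512, "x");
  await pageBlob.uploadPages(page, 0, page.length);

  // Only ranges that actually contain data are reported back.
  const ranges = await pageBlob.getPageRanges(0, 1024 * 1024);
  console.log(ranges.pageRange);

  // Grows (or shrinks) the declared maximum size without touching existing pages.
  await pageBlob.resize(2 * 1024 * 1024);
}
```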
+ */ + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$2, +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + 
sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource, + ], + isXML: true, + serializer: xmlSerializer$2, +}; - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } +/* + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing AppendBlob operations. */ +class AppendBlobImpl { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
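The AppendBlob operations above (create, appendBlock, appendBlockFromUrl, seal) surface through AppendBlobClient; blocks can only be added at the end of the blob, and sealing makes it read-only. A minimal sketch, assuming an existing container and a hypothetical log blob:

```ts
import { ContainerClient } from "@azure/storage-blob";

async function appendLog(container: ContainerClient, lines: string[]): Promise<void> {
  const appendBlob = container.getAppendBlobClient("run.log"); // hypothetical name
  await appendBlob.createIfNotExists();

  for (const line of lines) {
    const data = Buffer.from(line + "\n");
    // Each appendBlock commits a new block at the end of the blob.
    await appendBlob.appendBlock(data, data.length);
  }

  // Requires service version 2019-12-12 or later; afterwards the blob is read-only.
  await appendBlob.seal();
}
```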
+ */ + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$1, +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition, + ], + isXML: true, + serializer: xmlSerializer$1, +}; - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. 
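As a hedged aside on the AppendBlob operations above: the generated methods map one-to-one onto the REST calls, and the sketch below only shows the intended call order (create, append, optionally seal). The method signatures and the query/header parameter names in the comments come from the code above; `appendBlobContext` is an assumed handle to the `appendBlob` property of the generated StorageClient that appears further below, not something defined here.

// Illustrative only -- not part of the generated bundle.
async function writeAppendBlob(appendBlobContext) {
  const chunk = Buffer.from("first log line\n");
  await appendBlobContext.create(0, {});                        // PUT, sends x-ms-blob-type: AppendBlob (blobType1)
  await appendBlobContext.appendBlock(chunk.length, chunk, {}); // PUT ?comp=appendblock (comp22)
  await appendBlobContext.seal({});                             // PUT ?comp=seal (comp23), service version 2019-12-12+
}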
+ * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing BlockBlob operations. */ +class BlockBlobImpl { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. 
+ */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. + */ + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + blobType2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + 
copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties, + ], + isXML: true, + serializer: xmlSerializer, +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer, +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders, + }, + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer, +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer, +}; - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
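The Stage Block / Commit Block List contract documented above requires every block ID to be a Base64 string, at most 64 bytes before encoding, and of identical length for all blocks of a blob. A minimal, purely illustrative sketch of an ID scheme that satisfies those constraints:

// Zero-pad a counter before Base64-encoding so every ID has the same length.
function makeBlockId(index) {
  return Buffer.from(String(index).padStart(6, "0")).toString("base64");
}
// makeBlockId(0) === "MDAwMDAw", makeBlockId(1) === "MDAwMDAx" -- all 8 characters long.
// Blocks staged with stageBlock(blockId, ...) only become part of the blob once
// commitBlockList is called with the IDs in the order they should appear.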
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + var _a, _b; + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.26.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" }); + super(optionsWithDefaults); + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2025-01-05"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); + } +}; - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * @internal + */ +class StorageContextClient extends StorageClient$1 { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); + } +} - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = getCredentialFromPipeline(pipeline); + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} - this.raw = version +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Creates a span using the global tracer. 
+ * @internal + */ +const tracingClient = coreTracing.createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, + namespace: "Microsoft.Storage", +}); - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
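Since BlobSASPermissions.parse and toString (above) exist precisely to canonicalize the permission string, a short usage sketch may help; it assumes only that the class is re-exported from the package's public entry point, as it is in @azure/storage-blob:

const { BlobSASPermissions } = require("@azure/storage-blob");
const perms = BlobSASPermissions.parse("wcr"); // write, create and read, in any input order
console.log(perms.toString());                 // "rcw" -- re-emitted in the service-defined order
// BlobSASPermissions.parse("z") throws RangeError: Invalid permission: z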
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. 
+ */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
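ContainerSASPermissions.parse (above) behaves the same way but additionally understands the list ("l") and filter-by-tags ("f") flags; a one-line sketch, again assuming the package's public export:

const { ContainerSASPermissions } = require("@azure/storage-blob");
console.log(ContainerSASPermissions.parse("lfr").toString()); // "rlf" -- canonical order, with list and filterByTags kept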
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). 
+ * + * NOTE: Instances of this class are immutable. + */ +class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() + return undefined; + } + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
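For what toString above actually emits: it walks the fixed key list ("sv", "ss", ..., "sp", "sig", ...) and tryAppendQueryParameter (just below) URL-encodes each key/value pair while silently dropping unset values. A tiny illustration of that encoding behaviour, using the SASProtocol value defined earlier; the resulting query string shape is indicative, not exhaustive:

// "https,http" (SASProtocol.HttpsAndHttp) is emitted with the comma percent-encoded.
console.log(`${encodeURIComponent("spr")}=${encodeURIComponent("https,http")}`); // spr=https%2Chttp
// Unset fields produce no pair at all, so a minimal blob SAS looks like
// "sv=...&se=...&sr=b&sp=r&sig=..." rather than a fixed-width set of parameters.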
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } } -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; +} +function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. 
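Tying the pieces together, a hedged end-to-end sketch of generateBlobSASQueryParameters with a shared key credential; the account name, key variable and blob names are placeholders, and StorageSharedKeyCredential is assumed to be the package's public shared-key credential type:

const {
  generateBlobSASQueryParameters,
  BlobSASPermissions,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", process.env.STORAGE_KEY);
const sas = generateBlobSASQueryParameters(
  {
    containerName: "logs",
    blobName: "2024/01/01.log",
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // required here because no identifier is set
  },
  credential
);
console.log(sas.toString()); // "sv=...&se=...&sr=b&sp=r&sig=..." -- append to the blob URL after "?"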
+ * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; } - -SemVer.prototype.toString = function () { - return this.version +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. 
+ * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; } - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign: stringToSign, + }; } - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign: stringToSign, + }; } - -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) +/** + * ONLY AVAILABLE IN NODE.JS 
RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - } while (++i) + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; } - -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } - } while (++i) + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; } -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. 
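// --- Illustrative aside (not part of the bundled SDK code above) -----------
// Each generateBlobSASQueryParametersUDK* helper above follows the same
// pattern: join the documented fields with "\n" into a string-to-sign, then
// HMAC-SHA256 it with the base64-encoded user delegation key value. A minimal
// sketch with Node's built-in crypto, assuming `stringToSign` and a base64
// `signedKeyValue` are already in hand (names are placeholders):
const crypto = require("crypto");

function computeSasSignature(stringToSign, signedKeyValue) {
  // The service expects the base64-encoded HMAC-SHA256 of the UTF-8 string.
  return crypto
    .createHmac("sha256", Buffer.from(signedKeyValue, "base64"))
    .update(stringToSign, "utf8")
    .digest("base64");
}
// ----------------------------------------------------------------------------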
- case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) + else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] + if (!leaseId) { + leaseId = coreUtil.randomUUID(); } - } - break + this._leaseId = leaseId; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + this._leaseId = proposedLeaseId; + return response; + }); + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? 
void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }); + }); + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); + } +} - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. + * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceAbortedHandler = () => { + const abortError = new abortController.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. 
Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. 
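// --- Illustrative aside (not part of the bundled SDK code above) -----------
// A minimal sketch of how the BlobLeaseClient defined earlier is typically
// used: acquire a fixed-duration lease, do guarded work, then release it.
// `blobClient` is assumed to be an existing BlobClient or ContainerClient.
async function withLease(blobClient) {
  const leaseClient = new BlobLeaseClient(blobClient); // a random lease id is proposed
  // Per the JSDoc above, duration must be 15-60 seconds, or -1 for infinite.
  await leaseClient.acquireLease(30);
  try {
    // ... perform the writes/deletes that require the lease ...
  } finally {
    await leaseClient.releaseLease();
  }
}
// ----------------------------------------------------------------------------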
+ * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. 
+ * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } } -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
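// --- Illustrative aside (not part of the bundled SDK code above) -----------
// AVRO_INIT_BYTES above is the Avro object container file "magic": the ASCII
// bytes "Obj" followed by the container format version byte 1.
const avroMagic = Buffer.from([79, 98, 106, 1]);
console.log(avroMagic.subarray(0, 3).toString("ascii")); // "Obj"
console.log(avroMagic[3]);                               // 1 (format version)
// ----------------------------------------------------------------------------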
+class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
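// --- Illustrative aside (not part of the bundled SDK code above) -----------
// readZigZagLong above decodes Avro's variable-length, zig-zag-encoded ints.
// Zig-zag maps signed integers onto unsigned ones so small magnitudes stay
// small: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
// For values that fit in 32 bits the round trip is plain bit twiddling:
function zigZagEncode32(n) {
  return (n << 1) ^ (n >> 31);
}
function zigZagDecode32(z) {
  return (z >>> 1) ^ -(z & 1);
}
console.log(zigZagDecode32(zigZagEncode32(-3))); // -3
// ----------------------------------------------------------------------------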
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (_a) { + // no-op + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case 
AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); } - } } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} - -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} - -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} - -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} - -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} - -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} - -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} - -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) } - -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } } - -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) - }) +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } } - -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } } - -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 +class AvroMapType 
extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } } - -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } } -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; } -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. 
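// --- Illustrative aside (not part of the bundled SDK code above) -----------
// initialize() here JSON-parses the "avro.schema" metadata entry and hands it
// to AvroType.fromSchema. A hypothetical record schema (names are for
// illustration only) resolves to an AvroRecordType whose read() yields plain
// objects keyed by field name, plus a $schema entry:
const exampleSchema = {
  type: "record",
  name: "com.example.row", // hypothetical schema name
  fields: [
    { name: "id", type: "long" },
    { name: "label", type: "string" },
  ],
};
const exampleType = AvroType.fromSchema(exampleSchema);
// exampleType.read(stream) would resolve to something like
//   { $schema: "com.example.row", id: 1, label: "a" }
// ----------------------------------------------------------------------------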
+ this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects() { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (_a) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } } -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroReadable { } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. 
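// --- Illustrative aside (not part of the bundled SDK code above) -----------
// Putting the Avro pieces together: wrap a Node.js Readable in the
// AvroReadableFromStream defined here, feed it to AvroReader, and iterate the
// decoded records with for-await (parseObjects is an async generator).
// `nodeStream` is assumed to be a readable stream positioned at the start of
// an Avro container file.
async function dumpAvro(nodeStream) {
  const reader = new AvroReader(new AvroReadableFromStream(nodeStream));
  for await (const record of reader.parseObjects()) {
    console.log(record); // one decoded object per Avro record
  }
}
// ----------------------------------------------------------------------------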
+ return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } } -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. 
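// Illustrative sketch (assumption, not part of the bundled file above): the callback shapes
// that BlobQuickQueryStream's readInternal() switch above delivers. onProgress receives
// { loadedBytes } and onError receives { position, name, isFatal, description };
// the option object name is a placeholder.
const quickQueryStreamOptions = {
  onProgress: ({ loadedBytes }) => console.log(`query scanned ${loadedBytes} bytes`),
  onError: (err) =>
    console.warn(`query error at ${err.position}: ${err.name} (fatal: ${err.isFatal}) - ${err.description}`),
};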
+ * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; } - } - - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} - -Comparator.prototype.toString = function () { - return this.value -} - -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; } - } - - return cmp(version, this.operator, this.semver, this.options) -} - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; } - } - - var rangeTmp - - if (this.operator === '') { - if (this.value === '') { - return true + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + /** + * Indicates the version of the File service used + * to execute the request. 
+ * + * @readonly + */ + get version() { + return this.originalResponse.version; } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range - .trim() - .split(/\s+/) - .join(' ') - - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) - } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range } -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed. + */ + BlockBlobTier["Cold"] = "Cold"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). 
} - -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } } - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() - - while (result && remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +/** + * + * To get OAuth audience for a storage account for blob service. + */ +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; } -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); } -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
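// Illustrative sketch (not part of the bundled file above): the audience helper defined above
// in use; "mystorageaccount" is a placeholder account name.
const audience = getBlobServiceAccountAudience("mystorageaccount");
console.log(audience); // => "https://mystorageaccount.blob.core.windows.net/.default"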
-function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreUtil.delay(this.intervalInMs); + } } - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; } -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; } -function replaceTilde (comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. 
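// Illustrative sketch (not part of the bundled file above): sample outputs of the
// rangeToString helper defined above.
rangeToString({ offset: 0, count: 512 }); // => "bytes=0-511"
rangeToString({ offset: 255 });           // => "bytes=255-"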
+ */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } } - - debug('tilde return', ret) - return ret - }) } -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
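// Illustrative sketch (assumption, not part of the bundled file above): using the Batch helper
// defined above to run async work items with bounded concurrency. `urls` and `downloadOne`
// are placeholders supplied by the caller.
async function runWithConcurrency(urls, downloadOne) {
  const batch = new Batch(5); // at most 5 operations in flight at once
  for (const url of urls) {
    batch.addOperation(() => downloadOne(url));
  }
  await batch.do(); // resolves when every operation finishes, rejects on the first error
}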
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } } -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const maxBufferLength = buffer.constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. 
+ * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' + if (buffers) { + this.fill(buffers, totalLength); } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } } - - debug('caret return', ret) - return ret - }) + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } } -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. 
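// Illustrative sketch (assumption, not part of the bundled file above): filling a PooledBuffer
// from a few source chunks and streaming the pooled data back out, mirroring how
// BufferScheduler (below) uses it. Note that fill() shifts the consumed data out of `chunks`.
const chunks = [Buffer.from("hello "), Buffer.from("world")];
const pooled = new PooledBuffer(11, chunks, 11); // capacity, input buffers, total length
pooled.getReadableStream().pipe(process.stdout); // prints "hello world"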
Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } } -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. 
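// Illustrative sketch (assumption, not part of the bundled file above): reading a whole
// Readable into a preallocated Buffer with streamToBuffer2 defined above. It resolves with
// the byte count read and rejects if the data outgrows the buffer.
const { Readable } = require("stream");
const target = Buffer.alloc(1024);
streamToBuffer2(Readable.from([Buffer.from("some payload")]), target).then((bytesRead) => {
  console.log(target.slice(0, bytesRead).toString()); // "some payload"
});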
+ */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; - if (gtlt === '=' && anyX) { - gtlt = '' +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { + /** + * The name of the blob. + */ + get name() { + return this._name; } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. 
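// Illustrative sketch (assumption, not part of the bundled file above): the BlobClient
// constructor overloads above in use. The connection string variable, account, container
// and blob names are placeholders.
const { BlobClient, AnonymousCredential } = require("@azure/storage-blob");
// (connectionString, containerName, blobName)
const clientA = new BlobClient(process.env.STORAGE_CONNECTION_STRING, "my-container", "my-blob.txt");
// (url, credential, options?)
const clientB = new BlobClient(
  "https://myaccount.blob.core.windows.net/my-container/my-blob.txt",
  new AnonymousCredential()
);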
+ * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreUtil.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + }); + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. 
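+ *
+ * Example usage (a minimal sketch; assumes `blobClient` is an existing BlobClient):
+ *
+ * ```js
+ * // Probe for the blob before reading it; the result may already be stale when it is used.
+ * if (await blobClient.exists()) {
+ *   const properties = await blobClient.getProperties();
+ *   console.log(`Blob found, content length: ${properties.contentLength}`);
+ * } else {
+ *   console.log("Blob does not exist.");
+ * }
+ * ```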
+ */ + async exists(options = {}) { + return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + throw e; + } + }); + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + }); + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Marks the specified blob or snapshot for deletion if it exists. 
The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.delete(updatedOptions)); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + async undelete(options = {}) { + return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.undelete({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. 
+ * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + tags: toBlobTags(tags), + })); + }); + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. 
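+ *
+ * Example usage (a minimal sketch; assumes `blobClient` is an existing BlobClient):
+ *
+ * ```js
+ * // Take a read-only snapshot, then address it through a snapshot-scoped client.
+ * const snapshotResponse = await blobClient.createSnapshot();
+ * const snapshotClient = blobClient.withSnapshot(snapshotResponse.snapshot);
+ * console.log(`Snapshot created at: ${snapshotResponse.snapshot}`);
+ * ```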
+ */ + async createSnapshot(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + async abortCopyFromURL(copyId, options = {}) { + return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f, _g; + return assertResponse(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, + immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? 
void 0 : _g.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. + */ + async setAccessTier(tier, options = {}) { + return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + var _a; + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; } - } - - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (blockSize === 0) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + }); } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(safeRe[t.STAR], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. 
+ * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. + response.blobDownloadStream = undefined; + return response; + }); } - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false + /** + * Asynchronously copies a blob to a destination within the storage account. 
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async startCopyFromURL(copySource, options = {}) { + return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return assertResponse(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
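+ *
+ * Example usage (a minimal sketch; assumes `blobClient` was constructed with a
+ * StorageSharedKeyCredential and that `BlobSASPermissions` is imported from this package):
+ *
+ * ```js
+ * const sasUrl = await blobClient.generateSasUrl({
+ *   permissions: BlobSASPermissions.parse("r"), // read-only
+ *   expiresOn: new Date(Date.now() + 60 * 60 * 1000), // valid for one hour
+ * });
+ * console.log(`Read-only SAS URL: ${sasUrl}`);
+ * ```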
+ */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - } + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; } - - // Version has a -pre, but it's not one of the ones we like. - return false - } - - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} - -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } + /** + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
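+ *
+ * Example usage (a minimal sketch; assumes `blobServiceClient` is a BlobServiceClient for the
+ * same account authenticated with a TokenCredential, and that `BlobSASPermissions` is
+ * imported from this package):
+ *
+ * ```js
+ * const startsOn = new Date();
+ * const expiresOn = new Date(startsOn.valueOf() + 60 * 60 * 1000);
+ * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+ * const sasUrl = await blobClient.generateUserDelegationSasUrl(
+ *   { permissions: BlobSASPermissions.parse("r"), expiresOn },
+ *   userDelegationKey
+ * );
+ * console.log(`User delegation SAS URL: ${sasUrl}`);
+ * ```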
+ */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve) => { + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. 
-exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options = {}) { + return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false + /** + * Set immutability policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. + */ + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options = {}) { + return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) } - -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = this.storageClientContext.appendBlob; + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + const conditions = { ifNoneMatch: ETagAny }; + return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. 
+ * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? 
void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) } - - -/***/ }), - -/***/ 4138: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var v1 = __nccwpck_require__(1610); -var v4 = __nccwpck_require__(8373); - -var uuid = v4; -uuid.v1 = v1; -uuid.v4 = v4; - -module.exports = uuid; - - -/***/ }), - -/***/ 5694: -/***/ ((module) => { - /** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + * BlockBlobClient defines a set of operations applicable to block blobs. */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); -} - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 4069: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. - -var crypto = __nccwpck_require__(6113); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 1610: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(4069); -var bytesToUuid = __nccwpck_require__(5694); - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html - -var _nodeId; -var _clockseq; - -// Previous uuid creation time -var _lastMSecs = 0; -var _lastNSecs = 0; - -// See https://github.com/uuidjs/uuid for API details -function v1(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - - options = options || {}; - var node = options.node || _nodeId; - var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; - - // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - if (node == null || clockseq == null) { - var seedBytes = rng(); - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [ - seedBytes[0] | 0x01, - seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] - ]; +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" 
+ + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } - } - - // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); - - // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; - - // Time since last uuid creation (in msecs) - var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; - - // Per 4.2.1.2, Bump clockseq on clock regression - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } - - // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } - - // Per 4.2.1.2 Throw error if too many uuids are requested - if (nsecs >= 10000) { - throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - - // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - msecs += 12219292800000; - - // `time_low` - var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; - - // `time_mid` - var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; - - // `time_high_and_version` - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - b[i++] = tmh >>> 16 & 0xff; - - // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - b[i++] = clockseq >>> 8 | 0x80; - - // `clock_seq_low` - b[i++] = clockseq & 0xff; - - // `node` - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf ? buf : bytesToUuid(b); -} - -module.exports = v1; - - -/***/ }), - -/***/ 8373: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(4069); -var bytesToUuid = __nccwpck_require__(5694); - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? 
new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + if (!coreUtil.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + }); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. 
+ * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? 
void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
+ */ + async getBlockList(listType, options = {}) { + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + }); + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (coreUtil.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + }); + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + async uploadBrowserData(browserData, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); + } + /** + * + * Uploads data to block blob. 
Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + var _a, _b; + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); + } + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + }); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await fsStat(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. 
+ Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); } - } - - return buf || bytesToUuid(rnds); } - -module.exports = v4; - - -/***/ }), - -/***/ 7351: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const utils_1 = __nccwpck_require__(5278); /** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value + * PageBlobClient defines a set of operations applicable to page blobs. */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } + pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = this.storageClientContext.pageBlob; + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.pageBlobContext.create(0, size, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. 
+ */ + async uploadPages(body, offset, count, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. 
+ * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + async clearPages(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 2186: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(7351); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const oidc_utils_1 = __nccwpck_require__(8041); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { /** - * A code indicating that the action was successful + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. */ - ExitCode[ExitCode["Success"] = 0] = "Success"; + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + var _a; + const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshot, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(result); + }); + } /** - * A code indicating that the action was a failure + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, + leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ + offset: offset, + count: count, + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? 
void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - command_1.issueCommand('set-env', { name }, convertedVal); -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueFileCommand('PATH', inputPath); + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); } - else { - command_1.issueCommand('add-path', {}, inputPath); + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
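As a supplement to the `getPageRangesDiff` documentation above, here is a minimal, hedged sketch of calling it directly (not part of the bundled code; `pageBlobClient` is assumed to be an existing `PageBlobClient` over a page blob of at least 1024 bytes):

```js
// Hypothetical illustration: snapshot the blob, write one page, then diff against the snapshot.
const { snapshot } = await pageBlobClient.createSnapshot();
await pageBlobClient.uploadPages(Buffer.alloc(512, "a"), 0, 512);

const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshot);
for (const range of diff.pageRange || []) {
  console.log(`Changed: ${range.start} - ${range.end}`);
}
for (const range of diff.clearRange || []) {
  console.log(`Cleared: ${range.start} - ${range.end}`);
}
```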
+ */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+ options.conditions = options.conditions || {};
+ // AsyncIterableIterator to iterate over blobs
+ const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ },
+ };
 }
- if (options && options.trimWhitespace === false) {
- return val;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } - return val.trim(); -} -exports.getInput = getInput; -/** - * Gets the values of an multiline input. Each value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string[] - * - */ -function getMultilineInput(name, options) { - const inputs = getInput(name, options) - .split('\n') - .filter(x => x !== ''); - if (options && options.trimWhitespace === false) { - return inputs; + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - return inputs.map(input => input.trim()); -} -exports.getMultilineInput = getMultilineInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - const filePath = process.env['GITHUB_OUTPUT'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds a warning issue - * @param message warning issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. 
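A short, hedged sketch of the `resize` and `updateSequenceNumber` operations documented above (`pageBlobClient` is a placeholder for an existing `PageBlobClient`; the property read at the end is an assumption about the page blob properties response, not something shown in this bundle):

```js
// Grow the page blob; the new size must be a multiple of 512 bytes.
await pageBlobClient.resize(4096);

// "update" requires an explicit sequence number; "increment" does not.
await pageBlobClient.updateSequenceNumber("update", 7);
await pageBlobClient.updateSequenceNumber("increment");

// blobSequenceNumber is expected on a page blob's properties (assumption).
const props = await pageBlobClient.getProperties();
console.log(props.blobSequenceNumber);
```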
- */ -function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Adds a notice issue - * @param message notice issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.notice = notice; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - const filePath = process.env['GITHUB_STATE'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. 
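For the incremental copy described above, a hedged usage sketch: the client name and snapshot URL are placeholders, and the source must be a readable page blob snapshot URL (typically carrying a SAS token). The response fields read here mirror the copy headers and are an assumption, not taken from this bundle.

```js
// destPageBlobClient is a PageBlobClient for the destination blob (hypothetical name).
const snapshotUrl =
  "https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=2024-01-01T00:00:00.0000000Z&sv=<sas>";
const copy = await destPageBlobClient.startCopyIncremental(snapshotUrl);
// copyId / copyStatus are expected to mirror x-ms-copy-id / x-ms-copy-status (assumption).
console.log(copy.copyId, copy.copyStatus);
```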
+ */ + async startCopyIncremental(copySource, options = {}) { + return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } - command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. - * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -function getIDToken(aud) { - return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); } -exports.getIDToken = getIDToken; -/** - * Summary exports - */ -var summary_1 = __nccwpck_require__(1327); -Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); -/** - * @deprecated use core.summary - */ -var summary_2 = __nccwpck_require__(1327); -Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); -/** - * Path exports - */ -var path_utils_1 = __nccwpck_require__(2981); -Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); -Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); -Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 717: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -"use strict"; - -// For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(7147)); -const os = __importStar(__nccwpck_require__(2037)); -const uuid_1 = __nccwpck_require__(8974); -const utils_1 = __nccwpck_require__(5278); -function issueFileCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. 
+ // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
+ if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); } -exports.issueFileCommand = issueFileCommand; -function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - const convertedValue = utils_1.toCommandValue(value); - // These should realistically never happen, but just in case someone finds a - // way to exploit uuid generation let's not allow keys or values that contain - // the delimiter. - if (key.includes(delimiter)) { - throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); } - if (convertedValue.includes(delimiter)) { - throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } } - return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; } -exports.prepareKeyValueMessage = prepareKeyValueMessage; -//# sourceMappingURL=file-command.js.map - -/***/ }), - -/***/ 8041: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; +Mutex.keys = {}; +Mutex.listeners = {}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); -class OidcClient { - static createHttpClient(allowRetry = true, maxRetry = 10) { - const requestOptions = { - allowRetries: allowRetry, - maxRetries: maxRetry - }; - return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); } - static getRequestToken() { - const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); } - return token; } - static getIDTokenUrl() { - const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); } - return runtimeUrl; } - static getCall(id_token_url) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const httpclient = OidcClient.createHttpClient(); - const res = yield httpclient - .getJson(id_token_url) - .catch(error => { - throw new Error(`Failed to get ID Token. 
\n - Error Code : ${error.statusCode}\n - Error Message: ${error.message}`); + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); - const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; - if (!id_token) { - throw new Error('Response json body do not have ID Token field'); - } - return id_token; }); } - static getIDToken(audience) { - return __awaiter(this, void 0, void 0, function* () { - try { - // New ID Token is requested from action service - let id_token_url = OidcClient.getIDTokenUrl(); - if (audience) { - const encodedAudience = encodeURIComponent(audience); - id_token_url = `${id_token_url}&audience=${encodedAudience}`; - } - core_1.debug(`ID token url is ${id_token_url}`); - const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); - return id_token; - } - catch (error) { - throw new Error(`Error message: ${error.message}`); - } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); }); } } -exports.OidcClient = OidcClient; -//# sourceMappingURL=oidc-utils.js.map - -/***/ }), - -/***/ 2981: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(1017)); /** - * toPosixPath converts the given path to the posix form. On Windows, \\ will be - * replaced with /. - * - * @param pth. Path to transform. - * @return string Posix path. + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. */ -function toPosixPath(pth) { - return pth.replace(/[\\]/g, '/'); +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreUtil.randomUUID(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
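To complement the `submitBatch` examples further below, here is a hedged sketch of the `BlobClient` overload of `deleteBlob` shown above (`blobServiceClient` and `containerClient` are assumed to already exist; the blob names are placeholders):

```js
const blobBatchClient = blobServiceClient.getBlobBatchClient();
const batch = blobBatchClient.createBatch();

// The BlobClient overload reuses the client's own URL and credential.
await batch.deleteBlob(containerClient.getBlobClient("logs/old-1.log"));
await batch.deleteBlob(containerClient.getBlobClient("logs/old-2.log"), {
  deleteSnapshots: "include",
});

const result = await blobBatchClient.submitBatch(batch);
console.log(`${result.subResponsesSucceededCount} of ${result.subResponses.length} deletes succeeded`);
```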
+ */ + createPipeline(credential) { + const corePipeline = coreRestPipeline.createEmptyPipeline(); + corePipeline.addPolicy(coreClient.serializationPolicy({ + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + const pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: coreRestPipeline.createHttpHeaders(), + }; + }, + }; } -exports.toPosixPath = toPosixPath; -/** - * toWin32Path converts the given path to the win32 form. On Linux, / will be - * replaced with \\. - * - * @param pth. Path to transform. - * @return string Win32 path. 
- */ -function toWin32Path(pth) { - return pth.replace(/[/]/g, '\\'); +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } + } + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return next(request); + }, + }; } -exports.toWin32Path = toWin32Path; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * toPlatformPath converts the given path to a platform-specific path. It does - * this by replacing instances of / and \ with the platform-specific path - * separator. + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. * - * @param pth The path to platformize. - * @return string The platform-specific path. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch */ -function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path.sep); -} -exports.toPlatformPath = toPlatformPath; -//# sourceMappingURL=path-utils.js.map - -/***/ }), - -/***/ 1327: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(2037); -const fs_1 = __nccwpck_require__(7147); -const { access, appendFile, writeFile } = fs_1.promises; -exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; -exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; -class Summary { - constructor() { - this._buffer = ''; +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. 
+ this.serviceOrContainerContext = storageClientContext.container; + } + else { + this.serviceOrContainerContext = storageClientContext.service; + } } /** - * Finds the summary file path from the environment, rejects if env var is not found or file does not exist - * Also checks r/w permissions. - * - * @returns step summary file path + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. */ - filePath() { - return __awaiter(this, void 0, void 0, function* () { - if (this._filePath) { - return this._filePath; + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); } - const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; - if (!pathFromEnv) { - throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } - try { - yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); } - catch (_a) { - throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); } - this._filePath = pathFromEnv; - return this._filePath; - }); + } + return this.submitBatch(batch); } /** - * Wraps content in an HTML tag, adding any HTML attributes + * Submit batch request which consists of multiple subrequests. * - * @param {string} tag HTML tag to wrap - * @param {string | null} content content within the tag - * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * Get `blobBatchClient` and other details before running the snippets. 
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * - * @returns {string} content wrapped in HTML element + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + }); + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + /** + * The name of the container. */ - wrap(tag, content, attrs = {}) { - const htmlAttrs = Object.entries(attrs) - .map(([key, value]) => ` ${key}="${value}"`) - .join(''); - if (!content) { - return `<${tag}${htmlAttrs}>`; + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } } - return `<${tag}${htmlAttrs}>${content}`; + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = this.storageClientContext.container; } /** - * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @param {SummaryWriteOptions} [options] (optional) options for write operation + * @param options - Options to Container Create operation. 
* - * @returns {Promise} summary instance + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` */ - write(options) { - return __awaiter(this, void 0, void 0, function* () { - const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); - const filePath = yield this.filePath(); - const writeFunc = overwrite ? writeFile : appendFile; - yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); - return this.emptyBuffer(); + async create(options = {}) { + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); }); } /** - * Clears the summary buffer and wipes the summary file + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @returns {Summary} summary instance + * @param options - */ - clear() { - return __awaiter(this, void 0, void 0, function* () { - return this.emptyBuffer().write({ overwrite: true }); + async createIfNotExists(options = {}) { + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } + } }); } /** - * Returns the current summary buffer as a string + * Returns true if the Azure container resource represented by this client exists; false otherwise. * - * @returns {string} string of summary buffer + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - */ - stringify() { - return this._buffer; + async exists(options = {}) { + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** - * If the summary buffer is empty + * Creates a {@link BlobClient} * - * @returns {boolen} true if the buffer is empty + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
*/ - isEmptyBuffer() { - return this._buffer.length === 0; + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Resets the summary buffer without writing to summary file + * Creates an {@link AppendBlobClient} * - * @returns {Summary} summary instance + * @param blobName - An append blob name */ - emptyBuffer() { - this._buffer = ''; - return this; + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Adds raw text to the summary buffer + * Creates a {@link BlockBlobClient} * - * @param {string} text content to add - * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * @param blobName - A block blob name * - * @returns {Summary} summary instance + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - addRaw(text, addEOL = false) { - this._buffer += text; - return addEOL ? this.addEOL() : this; + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Adds the operating system-specific end-of-line marker to the buffer + * Creates a {@link PageBlobClient} * - * @returns {Summary} summary instance + * @param blobName - A page blob name */ - addEOL() { - return this.addRaw(os_1.EOL); + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Adds an HTML codeblock to the summary buffer + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties * - * @param {string} code content to render within fenced code block - * @param {string} lang (optional) language to syntax highlight code + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. * - * @returns {Summary} summary instance + * @param options - Options to Container Get Properties operation. */ - addCodeBlock(code, lang) { - const attrs = Object.assign({}, (lang && { lang })); - const element = this.wrap('pre', this.wrap('code', code), attrs); - return this.addRaw(element).addEOL(); + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); } /** - * Adds an HTML list to the summary buffer - * - * @param {string[]} items list of items to render - * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * Marks the specified container for deletion. 
The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @returns {Summary} summary instance + * @param options - Options to Container Delete operation. */ - addList(items, ordered = false) { - const tag = ordered ? 'ol' : 'ul'; - const listItems = items.map(item => this.wrap('li', item)).join(''); - const element = this.wrap(tag, listItems); - return this.addRaw(element).addEOL(); + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Adds an HTML table to the summary buffer - * - * @param {SummaryTableCell[]} rows table rows + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @returns {Summary} summary instance + * @param options - Options to Container Delete operation. */ - addTable(rows) { - const tableBody = rows - .map(row => { - const cells = row - .map(cell => { - if (typeof cell === 'string') { - return this.wrap('td', cell); + async deleteIfExists(options = {}) { + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } - const { header, data, colspan, rowspan } = cell; - const tag = header ? 'th' : 'td'; - const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); - return this.wrap(tag, data, attrs); - }) - .join(''); - return this.wrap('tr', cells); - }) - .join(''); - const element = this.wrap('table', tableBody); - return this.addRaw(element).addEOL(); + throw e; + } + }); } /** - * Adds a collapsable HTML details element to the summary buffer + * Sets one or more user-defined name-value pairs for the specified container. * - * @param {string} label text for the closed state - * @param {string} content collapsable content + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. * - * @returns {Summary} summary instance + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
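A companion sketch for the delete/metadata methods above, reusing the placeholder `containerClient` from the previous example: `setMetadata` replaces the container's full metadata set, and `deleteIfExists` maps the `ContainerNotFound` error code to `succeeded: false` instead of throwing:

```js
async function cleanup(containerClient) {
  // Replaces ALL existing metadata; calling setMetadata() with no argument clears it.
  await containerClient.setMetadata({ project: "demo", owner: "ci" });

  // delete() would throw on a missing container; deleteIfExists() reports it instead.
  const result = await containerClient.deleteIfExists();
  if (!result.succeeded) {
    console.log("Container was already gone");
  }
}
```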
*/ - addDetails(label, content) { - const element = this.wrap('details', this.wrap('summary', label) + content); - return this.addRaw(element).addEOL(); + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Adds an HTML image tag to the summary buffer + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * @param {string} src path to the image you to embed - * @param {string} alt text description of the image - * @param {SummaryImageOptions} options (optional) addition image attributes + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * - * @returns {Summary} summary instance + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. */ - addImage(src, alt, options) { - const { width, height } = options || {}; - const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); - const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); - return this.addRaw(element).addEOL(); + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + }); } /** - * Adds an HTML section heading element + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. 
+ * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return assertResponse(await this.containerContext.setAccessPolicy({ + abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. * - * @param {string} text heading text - * @param {number | string} [level=1] (optional) the heading level, default: 1 + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. * - * @returns {Summary} summary instance + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ - addHeading(text, level) { - const tag = `h${level}`; - const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) - ? 
tag - : 'h1'; - const element = this.wrap(allowedTag, text); - return this.addRaw(element).addEOL(); + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + }); + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return blobClient.delete(updatedOptions); + }); } /** - * Adds an HTML thematic break (
<hr>) to the summary buffer + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @returns {Summary} summary instance + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. */ - addSeparator() { - const element = this.wrap('hr', null); - return this.addRaw(element).addEOL(); + async listBlobFlatSegment(marker, options = {}) { + return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + }); } /** - * Adds an HTML line break (<br>
) to the summary buffer + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @returns {Summary} summary instance + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. */ - addBreak() { - const element = this.wrap('br', null); - return this.addRaw(element).addEOL(); + async listBlobHierarchySegment(delimiter, marker, options = {}) { + return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; + }); } /** - * Adds an HTML blockquote to the summary buffer + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * - * @param {string} text quote text - * @param {string} cite (optional) citation url + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. 
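The marker-based contract described above can also be driven by hand rather than through the `listSegments` generator. A sketch, using only the `listBlobFlatSegment` method and `continuationToken` shown in this diff (placeholder `containerClient`):

```js
async function listAllBlobNames(containerClient) {
  let marker; // undefined starts enumeration from the beginning
  do {
    const response = await containerClient.listBlobFlatSegment(marker);
    for (const blob of response.segment.blobItems) {
      console.log(blob.name);
    }
    // An empty continuationToken means the listing is complete.
    marker = response.continuationToken;
  } while (marker);
}
```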
+ */ + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects * - * @returns {Summary} summary instance + * @param options - Options to list blobs operation. */ - addQuote(text, cite) { - const attrs = Object.assign({}, (cite && { cite })); - const element = this.wrap('blockquote', text, attrs); - return this.addRaw(element).addEOL(); + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsFlatSegmentResponse = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); } /** - * Adds an HTML anchor tag to the summary buffer + * Returns an async iterable iterator to list all the blobs + * under the specified account. * - * @param {string} text link text/content - * @param {string} href hyperlink + * .byPage() returns an async iterable iterator to list the blobs in pages. 
* - * @returns {Summary} summary instance + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. 
*/ - addLink(text, href) { - const element = this.wrap('a', text, { href }); - return this.addRaw(element).addEOL(); - } -} -const _summary = new Summary(); -/** - * @deprecated use `core.summary` - */ -exports.markdownSummary = _summary; -exports.summary = _summary; -//# sourceMappingURL=summary.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandProperties = exports.toCommandValue = void 0; -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; } - else if (typeof input === 'string' || input instanceof String) { - return input; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. 
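To make the option-to-`include` mapping in the `listBlobsFlat` implementation above concrete, a short sketch of a filtered listing; the prefix value is a placeholder:

```js
async function listWithDetails(containerClient) {
  // includeMetadata/includeTags below become "metadata" and "tags" in the
  // REST `include` parameter assembled by listBlobsFlat().
  for await (const blob of containerClient.listBlobsFlat({
    prefix: "logs/",
    includeMetadata: true,
    includeTags: true,
  })) {
    console.log(blob.name, blob.metadata, blob.tags);
  }
}
```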
+ */ + listHierarchySegments(delimiter_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -/** - * - * @param annotationProperties - * @returns The command properties to send with the actual annotation command - * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 - */ -function toCommandProperties(annotationProperties) { - if (!Object.keys(annotationProperties).length) { - return {}; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter_1) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsHierarchySegmentResponse = _c; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); } - return { - title: annotationProperties.title, - file: annotationProperties.file, - line: annotationProperties.startLine, - endLine: annotationProperties.endLine, - col: annotationProperties.startColumn, - endColumn: annotationProperties.endColumn - }; -} -exports.toCommandProperties = toCommandProperties; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 8974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return 
_validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(1595)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(6993)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(1472)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(6217)); - -var _nil = _interopRequireDefault(__nccwpck_require__(2381)); - -var _version = _interopRequireDefault(__nccwpck_require__(427)); - -var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6385)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 5842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 2381: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 6385: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
- - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 6230: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 9784: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 8844: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 1458: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 1595: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + }); } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 6993: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(5920)); - -var _md = _interopRequireDefault(__nccwpck_require__(5842)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 5920: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6385)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_3) throw e_3.error; } + } + }); } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 1472: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... 
Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.pathname.split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; + } + /** + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
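`generateSasUrl` only works when the client was constructed with a `StorageSharedKeyCredential`, as the guard above shows. A sketch of the call, assuming the standalone `@azure/storage-blob` package, a hypothetical account name and key, and `ContainerSASPermissions` as the helper for the `permissions` field:

```js
const {
  ContainerClient,
  ContainerSASPermissions,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

// Hypothetical account credentials.
const credential = new StorageSharedKeyCredential("myaccount", process.env.ACCOUNT_KEY);
const containerClient = new ContainerClient(
  "https://myaccount.blob.core.windows.net/my-container",
  credential
);

async function makeReadOnlyLink() {
  // Read + list permissions, valid for one hour.
  return containerClient.generateSasUrl({
    permissions: ContainerSASPermissions.parse("rl"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  });
}

makeReadOnlyLink().then(console.log).catch(console.error);
```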
+ */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve) => { + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 6217: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(5920)); - -var _sha = _interopRequireDefault(__nccwpck_require__(8844)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 2609: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(6230)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 427: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(2609)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); } -var _default = version; -exports["default"] = _default; - -/***/ }), - -/***/ 1514: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
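For clients authenticated with a token credential instead of a shared key, the user-delegation variants above are the equivalent path: fetch a user delegation key from the service client, then sign the container SAS with it. A sketch assuming the standalone `@azure/storage-blob` and `@azure/identity` packages (the latter is not part of this bundle) and a hypothetical account URL:

```js
const { BlobServiceClient, ContainerSASPermissions } = require("@azure/storage-blob");
const { DefaultAzureCredential } = require("@azure/identity");

const serviceClient = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net", // hypothetical account URL
  new DefaultAzureCredential()
);

async function makeUserDelegationSas() {
  const startsOn = new Date();
  const expiresOn = new Date(Date.now() + 60 * 60 * 1000);
  // The key only covers the window it was requested for.
  const userDelegationKey = await serviceClient.getUserDelegationKey(startsOn, expiresOn);
  const containerClient = serviceClient.getContainerClient("my-container");
  return containerClient.generateUserDelegationSasUrl(
    { permissions: ContainerSASPermissions.parse("r"), startsOn, expiresOn },
    userDelegationKey
  );
}

makeUserDelegationSas().then(console.log).catch(console.error);
```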
+ * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to delete blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
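Because `toString` (defined a little further down) always emits characters in the service-mandated order, the class can also be used to normalize a permissions string no matter how it was written. A small sketch using only the helpers defined in this bundle; in application code they would come from the standalone `@azure/storage-blob` package:

```js
const { AccountSASPermissions } = require("@azure/storage-blob");

// Characters may be supplied in any order...
const fromString = AccountSASPermissions.parse("lrwd");

// ...or as a plain object with boolean flags, via the `from` helper below.
const fromObject = AccountSASPermissions.from({
  read: true,
  write: true,
  delete: true,
  list: true,
});

// Both serialize to the canonical order the service expects: "rwdl".
console.log(fromString.toString()); // "rwdl"
console.log(fromObject.toString()); // "rwdl"
```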
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExecOutput = exports.exec = void 0; -const string_decoder_1 = __nccwpck_require__(1576); -const tr = __importStar(__nccwpck_require__(8159)); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Exec a command. - * Output will be streamed to the live console. - * Returns promise with return code + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. See ExecOptions - * @returns Promise exit code + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. */ -function exec(commandLine, args, options) { - return __awaiter(this, void 0, void 0, function* () { - const commandArgs = tr.argStringToArray(commandLine); - if (commandArgs.length === 0) { - throw new Error(`Parameter 'commandLine' cannot be null or empty.`); +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } - // Path to tool to execute should be first arg - const toolPath = commandArgs[0]; - args = commandArgs.slice(1).concat(args || []); - const runner = new tr.ToolRunner(toolPath, args, options); - return runner.exec(); - }); + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. 
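`AccountSASResourceTypes` follows the same pattern as the permissions helper: flags can be parsed from a string or set directly, and `toString` (below) re-emits them as the `s`/`c`/`o` triplet. A small sketch with the class as defined here, assuming the standalone `@azure/storage-blob` export:

```js
const { AccountSASResourceTypes } = require("@azure/storage-blob");

// Parse an existing resource-types string.
const parsed = AccountSASResourceTypes.parse("co");
console.log(parsed.service, parsed.container, parsed.object); // false true true

// Or set the flags by hand and serialize them.
const resourceTypes = new AccountSASResourceTypes();
resourceTypes.service = true;
resourceTypes.object = true;
console.log(resourceTypes.toString()); // "so"
```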
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } } -exports.exec = exec; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Exec a command and get the output. - * Output will be streamed to the live console. - * Returns promise with the exit code and collected stdout and stderr + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. See ExecOptions - * @returns Promise exit code, stdout, and stderr + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. */ -function getExecOutput(commandLine, args, options) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - let stdout = ''; - let stderr = ''; - //Using string decoder covers the case where a mult-byte character is split - const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); - const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); - const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; - const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; - const stdErrListener = (data) => { - stderr += stderrDecoder.write(data); - if (originalStdErrListener) { - originalStdErrListener(data); - } - }; - const stdOutListener = (data) => { - stdout += stdoutDecoder.write(data); - if (originalStdoutListener) { - originalStdoutListener(data); +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. 
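Putting the three helpers together with `generateAccountSASQueryParameters` (defined a little further down in this bundle) yields a full account-level SAS. A sketch assuming the standalone `@azure/storage-blob` exports and a hypothetical account name and key:

```js
const {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  StorageSharedKeyCredential,
  generateAccountSASQueryParameters,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", process.env.ACCOUNT_KEY);

const sas = generateAccountSASQueryParameters(
  {
    permissions: AccountSASPermissions.parse("rwl"), // read, write, list
    services: AccountSASServices.parse("b").toString(), // blob service only
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  },
  credential
).toString();

// Append the SAS token to the account endpoint to use it.
console.log(`https://myaccount.blob.core.windows.net?${sas}`);
```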
+ * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); } - }; - const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); - const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); - //flush any remaining characters - stdout += stdoutDecoder.end(); - stderr += stderrDecoder.end(); - return { - exitCode, - stdout, - stderr - }; - }); -} -exports.getExecOutput = getExecOutput; -//# sourceMappingURL=exec.js.map - -/***/ }), - -/***/ 8159: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.argStringToArray = exports.ToolRunner = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const events = __importStar(__nccwpck_require__(2361)); -const child = __importStar(__nccwpck_require__(2081)); -const path = __importStar(__nccwpck_require__(1017)); -const io = __importStar(__nccwpck_require__(7436)); -const ioUtil = __importStar(__nccwpck_require__(1962)); -const timers_1 = __nccwpck_require__(9512); -/* eslint-disable @typescript-eslint/unbound-method */ -const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
- */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; + return accountSASServices; } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) + .sasQueryParameters; +} +function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? 
truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); } - // Windows (regular) else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; - } + throw new Error("Account connection string is only supported in Node.js environment"); } } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - return cmd; } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); - } - return s; + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. Failed with error ${err}`); - return ''; + else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); } - } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; - } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); } - return this.toolPath; + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? a - : this._windowsQuoteCmdArg(a); - } - argline += '"'; - return [argline]; - } - } - return this.args; + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. 
+ * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } - _endsWith(str, end) { - return str.endsWith(end); + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + }); } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + return containerClient.delete(updatedOptions); + }); } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. - // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; - } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. 
+ const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions, + })); + return { containerClient, containerUndeleteResponse }; + }); + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); + return { containerClient, containerRenameResponse }; + }); + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
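Since the Set Blob Service Properties operation leaves unspecified elements at their current values, `setProperties` can be called with just the piece being changed. A sketch that turns on blob soft delete for seven days, reusing the hypothetical `serviceClient` from the earlier snippets:

```js
async function enableBlobSoftDelete(serviceClient) {
  // Only the supplied properties are changed; everything else keeps its current value.
  await serviceClient.setProperties({
    deleteRetentionPolicy: { enabled: true, days: 7 },
  });

  // Read the properties back to confirm the change took effect.
  const properties = await serviceClient.getProperties();
  console.log(properties.deleteRetentionPolicy);
}
```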
+ */ + async getStatistics(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + }); + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. - // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. 
although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. - // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
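As the description above notes, the account-wide search can be scoped to a single container inside the expression itself. A sketch of that scoping under the assumption that the `@container` clause (the REST-level syntax for container scoping) is combined with an ordinary tag comparison, again with hypothetical names:

```js
async function findArchivedInContainer(serviceClient) {
  // Scope the account-wide search to one container, then match on a tag.
  const filter = "@container='my-container' AND status='archived'";
  for await (const blob of serviceClient.findBlobsByTags(filter)) {
    console.log(`${blob.containerName}/${blob.name}`);
  }
}
```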
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } } - else { - quoteHit = false; + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); + }); } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. 
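// A minimal standalone sketch of the libuv-style quoting walk implemented by
// _uvQuoteCmdArg just below, kept only to make the rules above concrete.
// The name uvQuoteCmdArgSketch is illustrative and not part of the bundle:
// arguments with no spaces, tabs or quotes are left untouched, everything else
// is wrapped in double quotes, embedded quotes are backslash-escaped, and
// backslashes that precede a quote are doubled.
function uvQuoteCmdArgSketch(arg) {
  if (!arg) return '""';
  if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) return arg;
  if (!arg.includes('"') && !arg.includes('\\')) return `"${arg}"`;
  let reverse = '"';
  let quoteHit = true;
  for (let i = arg.length; i > 0; i--) {
    // walk the string in reverse, starting from the closing quote
    reverse += arg[i - 1];
    if (quoteHit && arg[i - 1] === '\\') {
      reverse += '\\'; // double a backslash that precedes a quote
    } else if (arg[i - 1] === '"') {
      quoteHit = true;
      reverse += '\\'; // escape the embedded quote
    } else {
      quoteHit = false;
    }
  }
  reverse += '"';
  return reverse.split('').reverse().join('');
}
// uvQuoteCmdArgSketch('hello"world')   => '"hello\\"world"'
// uvQuoteCmdArgSketch('hello world\\') => '"hello world\\\\"'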
- if (!arg) { - // Need double quotation for empty argument - return '""'; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; + const include = []; + if (options.includeDeleted) { + include.push("deleted"); } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. 
- return `"${arg}"`; + if (options.includeMetadata) { + include.push("metadata"); } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } + if (options.includeSystem) { + include.push("system"); } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; } /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * - * @param tool path to tool to exec - * @param options optional exec options. See ExecOptions - * @returns number + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time */ - exec() { - return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { - return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); - } - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - let stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - let errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? 
optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); - } - errbuffer = this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error('child process missing stdin'); - } - cp.stdin.end(this.options.input); - } + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, })); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; }); } -} -exports.ToolRunner = ToolRunner; -/** - * Convert an arg string to an array of args. Handles escaping - * - * @param argString string of arguments - * @returns string[] array of arguments - */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. - if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } - else { - append(c); - } - continue; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. 
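// A hedged usage sketch for generateAccountSasUrl as implemented further down.
// It assumes blobServiceClient is the same placeholder client used in the
// examples above and that it was constructed with a StorageSharedKeyCredential
// (otherwise the method throws a RangeError); with no arguments it falls back
// to the defaults shown in the code: read-only permissions, resource types
// "sco" and a one-hour expiry.
const readOnlySasUrl = blobServiceClient.generateAccountSasUrl();
// Or with an explicit 15-minute expiry, read+list permissions and
// container/object resource types (values here are illustrative only):
const scopedSasUrl = blobServiceClient.generateAccountSasUrl(
  new Date(Date.now() + 15 * 60 * 1000),
  AccountSASPermissions.parse("rl"),
  "co"
);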
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (c === '\\' && escaped) { - append(c); - continue; + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); } - if (c === '\\' && inQuotes) { - escaped = true; - continue; + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); + return generateAccountSASQueryParametersInternal(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign; } - return args; } -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +exports.KnownEncryptionAlgorithmType = void 0; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); + +Object.defineProperty(exports, "RestError", ({ + enumerable: true, + get: function () { return coreRestPipeline.RestError; } +})); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; 
+exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 2856: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(84492).Writable) +const inherits = (__nccwpck_require__(47261).inherits) + +const StreamSearch = __nccwpck_require__(88534) + +const PartStream = __nccwpck_require__(38710) +const HeaderParser = __nccwpck_require__(90333) + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); - } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() } - _debug(message) { - this.emit('debug', message); + if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. 
This may indicate the process failed to start. Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); + } + }) + } } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } } -//# sourceMappingURL=toolrunner.js.map -/***/ }), +Dicer.prototype._unpause = function () { + if (!this._pause) { return } -/***/ 8090: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} -"use strict"; +module.exports = Dicer -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = exports.create = void 0; -const internal_globber_1 = __nccwpck_require__(8298); -const internal_hash_files_1 = __nccwpck_require__(2448); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); -} -exports.create = create; -/** - * Computes the sha256 hash of a glob - * - * @param patterns Patterns separated by newlines - * @param currentWorkspace Workspace used when matching files - * @param options Glob options - * @param verbose Enables verbose logging - */ -function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { - return __awaiter(this, void 0, void 0, function* () { - let followSymbolicLinks = true; - if (options && typeof options.followSymbolicLinks === 'boolean') { - followSymbolicLinks = options.followSymbolicLinks; - } - const globber = yield create(patterns, { followSymbolicLinks }); - return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose); - }); -} -exports.hashFiles = hashFiles; -//# sourceMappingURL=glob.js.map /***/ }), -/***/ 1026: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 90333: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOptions = void 0; -const core = __importStar(__nccwpck_require__(2186)); -/** - * Returns a copy with defaults filled in. 
- */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - matchDirectories: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.matchDirectories === 'boolean') { - result.matchDirectories = copy.matchDirectories; - core.debug(`matchDirectories '${result.matchDirectories}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } + +const EventEmitter = (__nccwpck_require__(15673).EventEmitter) +const inherits = (__nccwpck_require__(47261).inherits) +const getLimit = __nccwpck_require__(49692) + +const StreamSearch = __nccwpck_require__(88534) + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) } - return result; + if (isMatch) { self._finish() } + }) } -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map +inherits(HeaderParser, EventEmitter) -/***/ }), +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} -/***/ 8298: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} -"use strict"; +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultGlobber = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const fs = __importStar(__nccwpck_require__(7147)); -const globOptionsHelper = __importStar(__nccwpck_require__(1026)); -const path = __importStar(__nccwpck_require__(1017)); -const patternHelper = __importStar(__nccwpck_require__(9005)); -const internal_match_kind_1 = __nccwpck_require__(1063); -const internal_pattern_1 = __nccwpck_require__(4536); -const internal_search_state_1 = __nccwpck_require__(9117); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); - } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } - } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); - } - } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { - yield yield __await(item.path); - } - // Descend? 
- else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); - } - } - }); - } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... + if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); - } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); - } - throw err; - } - } - else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); - } - return stats; - }); + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return } -} -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser + /***/ }), -/***/ 2448: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 38710: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = void 0; -const crypto = __importStar(__nccwpck_require__(6113)); -const core = __importStar(__nccwpck_require__(2186)); -const fs = __importStar(__nccwpck_require__(7147)); -const stream = __importStar(__nccwpck_require__(2781)); -const util = __importStar(__nccwpck_require__(3837)); -const path = __importStar(__nccwpck_require__(1017)); -function hashFiles(globber, currentWorkspace, verbose = false) { - var e_1, _a; - var _b; - return __awaiter(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core.info : core.debug; - let hasMatch = false; - const githubWorkspace = currentWorkspace - ? currentWorkspace - : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); - const result = crypto.createHash('sha256'); - let count = 0; - try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { - writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); - continue; - } - if (fs.statSync(file).isDirectory()) { - writeDelegate(`Skip directory '${file}'.`); - continue; - } - const hash = crypto.createHash('sha256'); - const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs.createReadStream(file), hash); - result.write(hash.digest()); - count++; - if (!hasMatch) { - hasMatch = true; - } - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); - } - finally { if (e_1) throw e_1.error; } - } - result.end(); - if (hasMatch) { - writeDelegate(`Found ${count} files to hash.`); - return result.digest('hex'); - } - else { - writeDelegate(`No matches found for glob`); - return ''; - } - }); -} -exports.hashFiles = hashFiles; -//# sourceMappingURL=internal-hash-files.js.map -/***/ }), +const inherits = (__nccwpck_require__(47261).inherits) +const ReadableStream = (__nccwpck_require__(84492).Readable) -/***/ 1063: -/***/ ((__unused_webpack_module, exports) => { +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) -"use strict"; +PartStream.prototype._read = function (n) {} + +module.exports = PartStream -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MatchKind = void 0; -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map /***/ }), -/***/ 1849: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 88534: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const IS_WINDOWS = process.platform === 'win32'; + /** - * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * Copyright Brian White. All rights reserved. * - * For example, on Linux/macOS: - * - `/ => /` - * - `/hello => /` + * @see https://github.com/mscdex/streamsearch * - * For example, on Windows: - * - `C:\ => C:\` - * - `C:\hello => C:\` - * - `C: => C:` - * - `C:hello => C:` - * - `\ => \` - * - `\hello => \` - * - `\\hello => \\hello` - * - `\\hello\world => \\hello\world` - */ -function dirname(p) { - // Normalize slashes and trim unnecessary trailing slash - p = safeTrimTrailingSeparator(p); - // Windows UNC root, e.g. \\hello or \\hello\world - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; - } - // Get dirname - let result = path.dirname(p); - // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); - } - return result; -} -exports.dirname = dirname; -/** - * Roots the path if not already rooted. On Windows, relative roots like `\` - * or `C:` are expanded based on the current working directory. - */ -function ensureAbsoluteRoot(root, itemPath) { - assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - // Already rooted - if (hasAbsoluteRoot(itemPath)) { - return itemPath; - } - // Windows - if (IS_WINDOWS) { - // Check for itemPath like C: or C:foo - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - // Drive letter matches cwd? Expand to cwd - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - // Drive only, e.g. C: - if (itemPath.length === 2) { - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}`; - } - // Drive + path, e.g. 
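// A small sketch of how this matcher is driven, mirroring the way
// Dicer.prototype.setBoundary and _write use it above; the needle and chunk
// values are illustrative only.
const search = new SBMH('\r\n--boundary');
search.on('info', (isMatch, data, start, end) => {
  if (data) {
    // bytes known not to be part of the needle are safe to flush
    process.stdout.write(data.slice(start, end));
  }
  if (isMatch) {
    process.stdout.write('\n[needle matched]\n');
  }
});
search.push(Buffer.from('part one\r\n--boundarypart two', 'binary'));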
C:foo - else { - if (!cwd.endsWith('\\')) { - cwd += '\\'; - } - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; - } - } - // Different drive - else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } - // Check for itemPath like \ or \foo - else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; - } - } - assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - // Otherwise ensure root ends with a separator - if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { - // Intentionally empty - } - else { - // Append separator - root += path.sep; - } - return root + itemPath; -} -exports.ensureAbsoluteRoot = ensureAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\\hello\share` and `C:\hello` (and using alternate separator). - */ -function hasAbsoluteRoot(itemPath) { - assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \\hello\share or C:\hello - return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasAbsoluteRoot = hasAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool */ -function hasRoot(itemPath) { - assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \ or \hello or \\hello - // E.g. C: or C:\hello - return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); - } - // E.g. 
/hello - return itemPath.startsWith('/'); +const EventEmitter = (__nccwpck_require__(15673).EventEmitter) +const inherits = (__nccwpck_require__(47261).inherits) + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. + for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } } -exports.hasRoot = hasRoot; -/** - * Removes redundant slashes and converts `/` to `\` on Windows - */ -function normalizeSeparators(p) { - p = p || ''; - // Windows - if (IS_WINDOWS) { - // Convert slashes on Windows - p = p.replace(/\//g, '\\'); - // Remove redundant slashes - const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello - return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC - } - // Remove redundant slashes - return p.replace(/\/\/+/g, '/'); +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 } -exports.normalizeSeparators = normalizeSeparators; -/** - * Normalizes the path separators and trims the trailing separator (when safe). - * For example, `/foo/ => /foo` but `/ => /` - */ -function safeTrimTrailingSeparator(p) { - // Short-circuit if empty - if (!p) { - return ''; - } - // Normalize separators - p = normalizeSeparators(p); - // No trailing slash - if (!p.endsWith(path.sep)) { - return p; - } - // Check '/' on Linux/macOS and '\' on Windows - if (p === path.sep) { - return p; - } - // On Windows check if drive root. E.g. C:\ - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; - } - // Otherwise trim trailing slash - return p.substr(0, p.length - 1); + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r } -exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; -//# sourceMappingURL=internal-path-helper.js.map -/***/ }), +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] -/***/ 6836: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch -"use strict"; + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. 
+ // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Path = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Helper class for parsing paths into segments - */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); - } - // Remainder is the root - this.segments.unshift(remaining); - } - } - // Array - else { - // Must not be empty - assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - 
this.segments.push(segment); - } - } - } + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += path.sep; - } - result += this.segments[i]; - } - return result; + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } } -} -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map -/***/ }), + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } -/***/ 9005: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff -"use strict"; + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.partialMatch = exports.match = exports.getSearchPaths = void 0; -const pathHelper = __importStar(__nccwpck_require__(1849)); -const internal_match_kind_1 = __nccwpck_require__(1063); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. - */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? 
pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; - } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; - } + this._bufpos = len + return len } - return result; + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len } -exports.getSearchPaths = getSearchPaths; -/** - * Matches the patterns against the path - */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } - else { - result |= pattern.match(itemPath); - } - } - return result; + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] } -exports.match = match; -/** - * Checks whether to descend further into the directory - */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true } -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map + +module.exports = SBMH + /***/ }), -/***/ 4536: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 33438: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
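The streamsearch module added above is a streaming Boyer-Moore-Horspool matcher: non-matching haystack bytes are flushed through 'info' events, and a partial needle at the end of a chunk is parked in the lookbehind buffer until the next push(). A minimal usage sketch against the SBMH constructor defined above (the needle and chunk values are illustrative):

const matcher = new SBMH('\r\n--boundary')

matcher.on('info', (isMatch, data, start, end) => {
  // data[start, end) is haystack content that cannot contain the needle;
  // isMatch is true once the complete needle has been seen.
  if (data) process.stdout.write(data.slice(start, end))
  if (isMatch) process.stdout.write('<match>')
})

// The needle may straddle a chunk boundary; the lookbehind buffer covers it.
matcher.push(Buffer.from('part one\r\n--bou'))
matcher.push(Buffer.from('ndarypart two'))

The Dicer parser required by Busboy below relies on this matcher to locate part boundaries in a multipart body.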
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Pattern = void 0; -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const minimatch_1 = __nccwpck_require__(3973); -const internal_match_kind_1 = __nccwpck_require__(1063); -const internal_path_1 = __nccwpck_require__(6836); -const IS_WINDOWS = process.platform === 'win32'; -class Pattern { - constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { - /** - * Indicates whether matches should be excluded from the result set - */ - this.negate = false; - // Pattern overload - let pattern; - if (typeof patternOrNegate === 'string') { - pattern = patternOrNegate.trim(); - } - // Segments overload - else { - // Convert to pattern - segments = segments || []; - assert_1.default(segments.length, `Parameter 'segments' must not empty`); - const root = Pattern.getLiteral(segments[0]); - assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - // Negate - while (pattern.startsWith('!')) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); - } - // Normalize slashes and ensures absolute root - pattern = Pattern.fixupPattern(pattern, homedir); - // Segments - this.segments = new internal_path_1.Path(pattern).segments; - // Trailing slash indicates the pattern should only match directories, not regular files - this.trailingSeparator = pathHelper - .normalizeSeparators(pattern) - .endsWith(path.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - // Search path (literal path prior to the first glob segment) - let foundGlob = false; - const searchSegments = this.segments - .map(x => Pattern.getLiteral(x)) - .filter(x => !foundGlob && !(foundGlob = x === '')); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - // Root RegExp (required when determining partial match) - this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); - this.isImplicitPattern = isImplicitPattern; - // Create minimatch - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? 
pattern.replace(/\\/g, '/') : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - // Last segment is globstar? - if (this.segments[this.segments.length - 1] === '**') { - // Normalize slashes - itemPath = pathHelper.normalizeSeparators(itemPath); - // Append a trailing slash. Otherwise Minimatch will not match the directory immediately - // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns - // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. - if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { - // Note, this is safe because the constructor ensures the pattern has an absolute root. - // For example, formats like C: and C:foo on Windows are resolved to an absolute root. - itemPath = `${itemPath}${path.sep}`; - } - } - else { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - } - // Match - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; - } - return internal_match_kind_1.MatchKind.None; - } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // matchOne does not handle root path correctly - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); - } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); - } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS - .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment - .replace(/\?/g, '[?]') // escape '?' - .replace(/\*/g, '[*]'); // escape '*' - } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern, homedir) { - // Empty - assert_1.default(pattern, 'pattern cannot be empty'); - // Must not contain `.` segment, unless first segment - // Must not contain `..` segment - const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); - assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r - assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - // Normalize slashes - pattern = pathHelper.normalizeSeparators(pattern); - // Replace leading `.` segment - if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { - pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); - } - // Replace leading `~` segment - else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { - homedir = homedir || os.homedir(); - assert_1.default(homedir, 'Unable to determine HOME directory'); - assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = Pattern.globEscape(homedir) + pattern.substr(1); - } - // Replace relative drive root, e.g. 
pattern is C: or C:foo - else if (IS_WINDOWS && - (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(2); - } - // Replace relative root, e.g. pattern is \ or \foo - else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); - if (!root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(1); - } - // Otherwise ensure absolute root - else { - pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); - } - return pathHelper.normalizeSeparators(pattern); - } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. - */ - static getLiteral(segment) { - let literal = ''; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - // Escape - if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } - // Wildcard - else if (c === '*' || c === '?') { - return ''; - } - // Character set - else if (c === '[' && i + 1 < segment.length) { - let set = ''; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - // Escape - if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { - set += segment[++i2]; - continue; - } - // Closed - else if (c2 === ']') { - closed = i2; - break; - } - // Otherwise - else { - set += c2; - } - } - // Closed? - if (closed >= 0) { - // Cannot convert - if (set.length > 1) { - return ''; - } - // Convert to literal - if (set) { - literal += set; - i = closed; - continue; - } - } - // Otherwise fall thru - } - // Append - literal += c; - } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + +const WritableStream = (__nccwpck_require__(84492).Writable) +const { inherits } = __nccwpck_require__(47261) +const Dicer = __nccwpck_require__(2856) + +const MultipartParser = __nccwpck_require__(90415) +const UrlencodedParser = __nccwpck_require__(16780) +const parseParams = __nccwpck_require__(34426) + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, 
+ fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') } -exports.Pattern = Pattern; -//# sourceMappingURL=internal-pattern.js.map -/***/ }), +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} -/***/ 9117: -/***/ ((__unused_webpack_module, exports) => { +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy -"use strict"; +module.exports.Dicer = Dicer -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SearchState = void 0; -class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } -} -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map /***/ }), -/***/ 5526: -/***/ (function(__unused_webpack_module, exports) { +/***/ 90415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
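Busboy above is a Writable stream that picks a body parser from the request's Content-Type: getParserByHeaders() tests the first parsed content-type token against MultipartParser.detect and UrlencodedParser.detect and throws for anything else. A rough sketch of wiring the exported constructor into an HTTP server (the server and handler details here are illustrative, not part of this diff):

const http = require('http')

http.createServer((req, res) => {
  if (req.method !== 'POST') { res.end(); return }

  // Throws 'Unsupported Content-Type.' unless the request is
  // multipart/form-data or application/x-www-form-urlencoded.
  const busboy = Busboy({ headers: req.headers })

  busboy.on('field', (name, value) => console.log('field %s = %j', name, value))
  busboy.on('file', (name, stream, filename) => stream.resume())
  busboy.on('finish', () => res.end('parsed'))

  req.pipe(busboy)
}).listen(8000)

Note that Busboy can be invoked without new (the constructor guards for it) and that omitting a 'file' listener makes the multipart parser skip file parts entirely.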
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map -/***/ }), +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams -/***/ 6255: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +const { Readable } = __nccwpck_require__(84492) +const { inherits } = __nccwpck_require__(47261) -"use strict"; +const Dicer = __nccwpck_require__(2856) -/* eslint-disable @typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -const undici_1 = __nccwpck_require__(1773); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - 
Headers["ContentType"] = "content-type"; -})(Headers || (exports.Headers = Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); - } - readBodyBuffer() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - const chunks = []; - this.message.on('data', (chunk) => { - chunks.push(chunk); - }); - this.message.on('end', () => { - resolve(Buffer.concat(chunks)); - }); - })); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - 
options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); - } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); +const parseParams = __nccwpck_require__(34426) +const decodeText = __nccwpck_require__(99136) +const basename = __nccwpck_require__(60496) +const getLimit = __nccwpck_require__(49692) + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let 
i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. 
- yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); - } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); + + ++nfiles + + if (!boy._events.file) { + self.parser._ignore() + return } - else { - req.end(); + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - getAgentDispatcher(serverUrl) { - const parsedUrl = new URL(serverUrl); - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (!useProxy) { - return; + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize } - return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; + + onEnd = function () { + curFile = undefined + file.push(null) } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() } - return additionalHeaders[header] || clientHeader || _default; + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart + + +/***/ }), + +/***/ 16780: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Decoder = __nccwpck_require__(89730) +const decodeText = __nccwpck_require__(99136) +const getLimit = __nccwpck_require__(49692) + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (!useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. 
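All parser limits funnel through getLimit(limits, name, default): the multipart parser above defaults to 1 MiB per field value, 80 KiB of part headers, 2000 header pairs, and no cap on files, fields or parts. Oversized input is truncated rather than rejected, which the events emitted above make observable. A sketch, continuing the server example and using illustrative limit values:

const busboy = Busboy({
  headers: req.headers,
  limits: { fileSize: 5 * 1024 * 1024, files: 3, fields: 100 }
})

busboy.on('file', (name, stream, filename) => {
  // A stream that hits fileSize is cut short: truncated is set and 'limit' fires.
  stream.on('limit', () => console.warn('%s truncated at 5 MiB', filename))
  stream.resume()
})
busboy.on('filesLimit', () => console.warn('more than 3 files; extra file parts skipped'))
busboy.on('fieldsLimit', () => console.warn('more than 100 fields; extra fields skipped'))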
- if (agent) { - return agent; + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true } - // if tunneling agent isn't assigned create a new agent - if (!agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? 
new https.Agent(options) : new http.Agent(options); - this._agent = agent; + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true } - return agent; + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } } - _getProxyAgentDispatcher(parsedUrl, proxyUrl) { - let proxyAgent; - if (this._keepAlive) { - proxyAgent = this._proxyAgentDispatcher; - } - // if agent is already assigned use that agent. - if (proxyAgent) { - return proxyAgent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { - token: `${proxyUrl.username}:${proxyUrl.password}` - }))); - this._proxyAgentDispatcher = proxyAgent; - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { - rejectUnauthorized: false - }); + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded + + +/***/ }), + +/***/ 89730: +/***/ ((module) => { + +"use strict"; + + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined } - return proxyAgent; + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder + + +/***/ }), + +/***/ 60496: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? 
'' : path) } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); + } + return (path === '..' || path === '.' ? '' : path) +} + + +/***/ }), + +/***/ 99136: +/***/ ((module) => { + +"use strict"; + + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function decodeText (text, textEncoding, destEncoding) { + if (text) { + if (textDecoders.has(destEncoding)) { + try { + return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) + } catch (e) { } + } else { + try { + textDecoders.set(destEncoding, new TextDecoder(destEncoding)) + return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) + } catch (e) { } } + } + return text } -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map + +module.exports = decodeText + /***/ }), -/***/ 9835: -/***/ ((__unused_webpack_module, exports) => { +/***/ 49692: +/***/ ((module) => { + +"use strict"; + + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} + + +/***/ }), + +/***/ 34426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const decodeText = __nccwpck_require__(99136) + +const RE_ENCODED = /%([a-fA-F0-9]{2})/g + +function encodedReplacer (match, byte) { + return String.fromCharCode(parseInt(byte, 16)) +} + +function parseParams (str) { + const res = [] + let state = 'key' + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + + for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line 
no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = 'key' + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === 'charset' || state === 'lang') && char === "'") { + if (state === 'charset') { + state = 'lang' + charset = tmp.substring(1) + } else { state = 'value' } + tmp = '' + continue + } else if (state === 'key' && + (char === '*' || char === '=') && + res.length) { + if (char === '*') { state = 'charset' } else { state = 'value' } + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = 'key' + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams + + +/***/ }), + +/***/ 46412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(61546); +const extend = __nccwpck_require__(38171); +const resource_stream_1 = __nccwpck_require__(72199); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. 
+ * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; } else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; + query = firstArgument; } - })(); - if (proxyVar) { - try { - return new URL(proxyVar); + if (typeof lastArgument === 'function') { + callback = lastArgument; } - catch (_a) { - if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) - return new URL(`http://${proxyVar}`); + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const reqHost = reqUrl.hostname; - if (isLoopbackAddress(reqHost)) { - return true; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperNoProxyItem === '*' || - upperReqHosts.some(x => x === upperNoProxyItem || - x.endsWith(`.${upperNoProxyItem}`) || - (upperNoProxyItem.startsWith('.') && - x.endsWith(`${upperNoProxyItem}`)))) { - return true; + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. 
+ * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); } - return false; -} -exports.checkBypass = checkBypass; -function isLoopbackAddress(host) { - const hostLower = host.toLowerCase(); - return (hostLower === 'localhost' || - hostLower.startsWith('127.') || - hostLower.startsWith('[::1]') || - hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } -//# sourceMappingURL=proxy.js.map +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 1962: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 72199: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; -const fs = __importStar(__nccwpck_require__(7147)); -const path = __importStar(__nccwpck_require__(1017)); -_a = fs.promises -// export const {open} = 'fs' -, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -// export const {open} = 'fs' -exports.IS_WINDOWS = process.platform === 'win32'; -// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 -exports.UV_FS_O_EXLOCK = 0x10000000; -exports.READONLY = fs.constants.O_RDONLY; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(12781); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? 
Infinity : args.maxResults; + this._otherArgs = []; } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); } - return p.startsWith('/'); -} -exports.isRooted = isRooted; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. - */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } + _read() { + if (this._reading) { + return; } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + let more = true; + for (const result of results) { + if (this._ended) { + break; } - return filePath; + more = this.push(result); } - else { - if (isUnixExecutable(stats)) { - return filePath; - } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); } - } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); } - return ''; - }); -} 
-exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); -} -// Get the path of cmd.exe in windows -function getCmdPath() { - var _a; - return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; } -exports.getCmdPath = getCmdPath; -//# sourceMappingURL=io-util.js.map +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map /***/ }), -/***/ 7436: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 3497: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; -const assert_1 = __nccwpck_require__(9491); -const path = __importStar(__nccwpck_require__(1017)); -const ioUtil = __importStar(__nccwpck_require__(1962)); -/** - * Copies a file or folder. - * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js - * - * @param source source path - * @param dest destination path - * @param options optional. See CopyOptions. - */ -function cp(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const { force, recursive, copySourceDirectory } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? 
yield ioUtil.stat(dest) : null; - // Dest is an existing file, but not forcing - if (destStat && destStat.isFile() && !force) { - return; - } - // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() && copySourceDirectory - ? path.join(dest, path.basename(source)) - : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); - } - else { - yield cpDirRecursive(source, newDest, 0, force); - } - } - else { - if (path.relative(source, newDest) === '') { - // a file cannot be copied to itself - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); -} -exports.cp = cp; -/** - * Moves a path. - * - * @param source source path - * @param dest destination path - * @param options optional. See MoveOptions. - */ -function mv(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - // If dest is directory copy src into dest - dest = path.join(dest, path.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } - else { - throw new Error('Destination already exists'); - } - } - } - yield mkdirP(path.dirname(dest)); - yield ioUtil.rename(source, dest); - }); -} -exports.mv = mv; +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(12781); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. /** - * Remove a path recursively with force + * Populate the `{{projectId}}` placeholder. * - * @param inputPath path to remove + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. 
*/ -function rmRF(inputPath) { - return __awaiter(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - // Check for invalid characters - // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file - if (/[*"<>|]/.test(inputPath)) { - throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); } } - try { - // note if path does not exist, error is silent - yield ioUtil.rm(inputPath, { - force: true, - maxRetries: 3, - recursive: true, - retryDelay: 300 - }); - } - catch (err) { - throw new Error(`File was unable to be removed ${err}`); + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); } - }); + value = value.replace(/{{projectId}}/g, projectId); + } + return value; } -exports.rmRF = rmRF; +exports.replaceProjectIdToken = replaceProjectIdToken; /** - * Make a directory. Creates the full path with folders in between - * Will throw if it fails - * - * @param fsPath path to create - * @returns Promise + * Custom error type for missing project ID errors. */ -function mkdirP(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - yield ioUtil.mkdir(fsPath, { recursive: true }); - }); +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } } -exports.mkdirP = mkdirP; +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 19203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; /** - * Returns path of a tool had the tool actually been invoked. Resolves via paths. - * If you check and the tool does not exist, it will throw. + * Wraps a callback style function to conditionally return a promise. * - * @param tool name of the tool - * @param check whether to check if tool exists - * @returns Promise path to tool + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. 
+ * @return {function} wrapped */ -function which(tool, check) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); } - // recursive when check=true - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. + if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); } else { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + resolve(callbackArgs); } - } - return result; - } - const matches = yield findInPath(tool); - if (matches && matches.length > 0) { - return matches[0]; - } - return ''; - }); + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; } -exports.which = which; +exports.promisify = promisify; /** - * Returns a list of all occurrences of the given tool on the system path. + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. * - * @returns Promise the paths of the tool + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. */ -function findInPath(tool) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { - for (const extension of process.env['PATHEXT'].split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. 
- if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return [filePath]; - } - return []; - } - // if any path separators, return empty - if (tool.includes(path.sep)) { - return []; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // find all matches - const matches = []; - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); - if (filePath) { - matches.push(filePath); - } - } - return matches; +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on }); -} -exports.findInPath = findInPath; -function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - const copySourceDirectory = options.copySourceDirectory == null - ? true - : Boolean(options.copySourceDirectory); - return { force, recursive, copySourceDirectory }; -} -function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure there is not a run away recursive copy - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - // Recurse - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } - else { - yield copyFile(srcFile, destFile, force); - } + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); } - // Change the mode for the newly created directory - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); }); } -// Buffered file copy -function copyFile(srcFile, destFile, force) { - return __awaiter(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - // unlink/re-link it - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } - catch (e) { - // Try to override file permission - if (e.code === 'EPERM') { - yield ioUtil.chmod(destFile, '0666'); - yield ioUtil.unlink(destFile); - } - // other errors = it doesn't exist, no work to do - } - // Copy over symlink - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); } - else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); } }); } -//# sourceMappingURL=io.js.map +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 2856: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 44458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const WritableStream = (__nccwpck_require__(4492).Writable) -const inherits = (__nccwpck_require__(7261).inherits) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(33542)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(29411)); -const StreamSearch = __nccwpck_require__(8534) +var _v3 = _interopRequireDefault(__nccwpck_require__(83424)); -const PartStream = __nccwpck_require__(8710) -const HeaderParser = __nccwpck_require__(333) +var _v4 = _interopRequireDefault(__nccwpck_require__(64051)); -const DASH = 45 -const B_ONEDASH = Buffer.from('-') -const B_CRLF = Buffer.from('\r\n') -const EMPTY_FN = function () {} +var _nil = _interopRequireDefault(__nccwpck_require__(46570)); -function Dicer (cfg) { - if (!(this instanceof Dicer)) { return new Dicer(cfg) } - WritableStream.call(this, cfg) +var _version = _interopRequireDefault(__nccwpck_require__(35611)); - if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); - if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); - this._headerFirst = cfg.headerFirst +var _parse = _interopRequireDefault(__nccwpck_require__(99645)); - this._dashes = 0 - this._parts = 0 - this._finished = false - this._realFinish = false - this._isPreamble = true - this._justMatched = false - this._firstWrite = true - this._inHeader = true - this._part = undefined - this._cb = undefined - this._ignoreData = false - this._partOpts = { highWaterMark: cfg.partHwm } - this._pause = false +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - const self = this - this._hparser = new HeaderParser(cfg) - this._hparser.on('header', function (header) { - self._inHeader = false - self._part.emit('header', header) - }) -} -inherits(Dicer, WritableStream) +/***/ }), -Dicer.prototype.emit = function (ev) { - if (ev === 'finish' && !this._realFinish) { - if (!this._finished) { - const self = this - process.nextTick(function () { - self.emit('error', new Error('Unexpected end of multipart data')) - if (self._part && !self._ignoreData) { - const type = (self._isPreamble ? 'Preamble' : 'Part') - self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) - self._part.push(null) - process.nextTick(function () { - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - return - } - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - } - } else { WritableStream.prototype.emit.apply(this, arguments) } +/***/ 84953: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); } -Dicer.prototype._write = function (data, encoding, cb) { - // ignore unexpected data (e.g. extra trailer data after finished) - if (!this._hparser && !this._bparser) { return cb() } +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 46570: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 99645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); - if (this._headerFirst && this._isPreamble) { - if (!this._part) { - this._part = new PartStream(this._partOpts) - if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } - } - const r = this._hparser.push(data) - if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // allows for "easier" testing - if (this._firstWrite) { - this._bparser.push(B_CRLF) - this._firstWrite = false +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); } - this._bparser.push(data) + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - if (this._pause) { this._cb = cb } else { cb() } -} + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ 
-Dicer.prototype.reset = function () { - this._part = undefined - this._bparser = undefined - this._hparser = undefined -} + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ -Dicer.prototype.setBoundary = function (boundary) { - const self = this - this._bparser = new StreamSearch('\r\n--' + boundary) - this._bparser.on('info', function (isMatch, data, start, end) { - self._oninfo(isMatch, data, start, end) - }) -} + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ -Dicer.prototype._ignore = function () { - if (this._part && !this._ignoreData) { - this._ignoreData = true - this._part.on('error', EMPTY_FN) - // we must perform some kind of read on the stream even though we are - // ignoring the data, otherwise node's Readable stream will not emit 'end' - // after pushing null to the stream - this._part.resume() - } + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; } -Dicer.prototype._oninfo = function (isMatch, data, start, end) { - let buf; const self = this; let i = 0; let r; let shouldWriteMore = true +var _default = parse; +exports["default"] = _default; - if (!this._part && this._justMatched && data) { - while (this._dashes < 2 && (start + i) < end) { - if (data[start + i] === DASH) { - ++i - ++this._dashes - } else { - if (this._dashes) { buf = B_ONEDASH } - this._dashes = 0 - break - } - } - if (this._dashes === 2) { - if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } - this.reset() - this._finished = true - // no more parts will be added - if (self._parts === 0) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } - } - if (this._dashes) { return } - } - if (this._justMatched) { this._justMatched = false } - if (!this._part) { - this._part = new PartStream(this._partOpts) - this._part._read = function (n) { - self._unpause() - } - if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } - if (!this._isPreamble) { this._inHeader = true } - } - if (data && start < end && !this._ignoreData) { - if (this._isPreamble || !this._inHeader) { - if (buf) { shouldWriteMore = this._part.push(buf) } - shouldWriteMore = this._part.push(data.slice(start, end)) - if (!shouldWriteMore) { this._pause = true } - } else if (!this._isPreamble && this._inHeader) { - if (buf) { this._hparser.push(buf) } - r = this._hparser.push(data.slice(start, end)) - if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } - } - } - if (isMatch) { - this._hparser.reset() - if (this._isPreamble) { this._isPreamble = false } else { - if (start !== end) { - ++this._parts - this._part.on('end', function () { - if (--self._parts === 0) { - if (self._finished) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } else { - self._unpause() - } - } - }) - } - } - this._part.push(null) - this._part = undefined - 
this._ignoreData = false - this._justMatched = true - this._dashes = 0 - } -} +/***/ }), -Dicer.prototype._unpause = function () { - if (!this._pause) { return } +/***/ 94323: +/***/ ((__unused_webpack_module, exports) => { - this._pause = false - if (this._cb) { - const cb = this._cb - this._cb = undefined - cb() - } -} +"use strict"; -module.exports = Dicer +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; /***/ }), -/***/ 333: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 91430: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) -const getLimit = __nccwpck_require__(9692) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; -const StreamSearch = __nccwpck_require__(8534) +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); -const B_DCRLF = Buffer.from('\r\n\r\n') -const RE_CRLF = /\r\n/g -const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -function HeaderParser (cfg) { - EventEmitter.call(this) +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - cfg = cfg || {} - const self = this - this.nread = 0 - this.maxed = false - this.npairs = 0 - this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) - this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) - this.buffer = '' - this.header = {} - this.finished = false - this.ss = new StreamSearch(B_DCRLF) - this.ss.on('info', function (isMatch, data, start, end) { - if (data && !self.maxed) { - if (self.nread + end - start >= self.maxHeaderSize) { - end = self.maxHeaderSize - self.nread + start - self.nread = self.maxHeaderSize - self.maxed = true - } else { self.nread += (end - start) } +let poolPtr = rnds8Pool.length; - self.buffer += data.toString('binary', start, end) - } - if (isMatch) { self._finish() } - }) -} -inherits(HeaderParser, EventEmitter) +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); -HeaderParser.prototype.push = function (data) { - const r = this.ss.push(data) - if (this.finished) { return r } -} + poolPtr = 0; + } -HeaderParser.prototype.reset = function () { - this.finished = false - this.buffer = '' - this.header = {} - this.ss.reset() + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -HeaderParser.prototype._finish = function () { - if (this.buffer) { this._parseHeader() } - this.ss.matches = this.ss.maxMatches - const header = this.header - this.header = {} - this.buffer = '' - this.finished = true - this.nread = this.npairs = 0 - this.maxed = false - this.emit('header', header) -} +/***/ }), -HeaderParser.prototype._parseHeader = function () { - if (this.npairs === this.maxHeaderPairs) { return } +/***/ 77416: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const lines = this.buffer.split(RE_CRLF) - const len = lines.length - let m, h +"use strict"; - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (lines[i].length === 0) { continue } - if (lines[i][0] === '\t' || lines[i][0] === ' ') { - // folded header 
content - // RFC2822 says to just remove the CRLF and not the whitespace following - // it, so we follow the RFC and include the leading whitespace ... - if (h) { - this.header[h][this.header[h].length - 1] += lines[i] - continue - } - } - const posColon = lines[i].indexOf(':') - if ( - posColon === -1 || - posColon === 0 - ) { - return - } - m = RE_HDR.exec(lines[i]) - h = m[1].toLowerCase() - this.header[h] = this.header[h] || [] - this.header[h].push((m[2] || '')) - if (++this.npairs === this.maxHeaderPairs) { break } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } -} -module.exports = HeaderParser + return _crypto.default.createHash('sha1').update(bytes).digest(); +} +var _default = sha1; +exports["default"] = _default; /***/ }), -/***/ 8710: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 32122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const inherits = (__nccwpck_require__(7261).inherits) -const ReadableStream = (__nccwpck_require__(4492).Readable) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -function PartStream (opts) { - ReadableStream.call(this, opts) +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); } -inherits(PartStream, ReadableStream) -PartStream.prototype._read = function (n) {} +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields -module.exports = PartStream + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} +var _default = stringify; +exports["default"] = _default; /***/ }), -/***/ 8534: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 33542: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/** - * Copyright Brian White. All rights reserved. 
- * - * @see https://github.com/mscdex/streamsearch - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation - * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool - */ -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -function SBMH (needle) { - if (typeof needle === 'string') { - needle = Buffer.from(needle) - } +var _rng = _interopRequireDefault(__nccwpck_require__(91430)); - if (!Buffer.isBuffer(needle)) { - throw new TypeError('The needle has to be a String or a Buffer.') - } +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); - const needleLength = needle.length +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - if (needleLength === 0) { - throw new Error('The needle cannot be an empty String/Buffer.') - } +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; - if (needleLength > 256) { - throw new Error('The needle cannot have a length bigger than 256.') - } +let _clockseq; // Previous uuid creation time - this.maxMatches = Infinity - this.matches = 0 - this._occ = new Array(256) - .fill(needleLength) // Initialize occurrence table. - this._lookbehind_size = 0 - this._needle = needle - this._bufpos = 0 +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - this._lookbehind = Buffer.alloc(needleLength) +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 - // Populate occurrence table with analysis of the needle, - // ignoring last letter. 
- for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var - this._occ[needle[i]] = needleLength - 1 - i - } -} -inherits(SBMH, EventEmitter) + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); -SBMH.prototype.reset = function () { - this._lookbehind_size = 0 - this.matches = 0 - this._bufpos = 0 -} + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } -SBMH.prototype.push = function (chunk, pos) { - if (!Buffer.isBuffer(chunk)) { - chunk = Buffer.from(chunk, 'binary') - } - const chlen = chunk.length - this._bufpos = pos || 0 - let r - while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } - return r -} + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. -SBMH.prototype._sbmh_feed = function (data) { - const len = data.length - const needle = this._needle - const needleLength = needle.length - const lastNeedleChar = needle[needleLength - 1] - // Positive: points to a position in `data` - // pos == 3 points to data[3] - // Negative: points to a position in the lookbehind buffer - // pos == -2 points to lookbehind[lookbehind_size - 2] - let pos = -this._lookbehind_size - let ch + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock - if (pos < 0) { - // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool - // search with character lookup code that considers both the - // lookbehind buffer and the current round's haystack data. - // - // Loop until - // there is a match. - // or until - // we've moved past the position that requires the - // lookbehind buffer. In this case we switch to the - // optimized loop. - // or until - // the character to look at lies outside the haystack. - while (pos < 0 && pos <= len - needleLength) { - ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - if ( - ch === lastNeedleChar && - this._sbmh_memcmp(data, pos, needleLength - 1) - ) { - this._lookbehind_size = 0 - ++this.matches - this.emit('info', true) + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - return (this._bufpos = pos + needleLength) - } - pos += this._occ[ch] - } + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval - // No match. - if (pos < 0) { - // There's too few data for Boyer-Moore-Horspool to run, - // so let's use a different algorithm to skip as much as - // we can. 
- // Forward pos until - // the trailing part of lookbehind + data - // looks like the beginning of the needle - // or until - // pos == 0 - while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } - } + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested - if (pos >= 0) { - // Discard lookbehind buffer. - this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) - this._lookbehind_size = 0 - } else { - // Cut off part of the lookbehind buffer that has - // been processed and append the entire haystack - // into it. - const bytesToCutOff = this._lookbehind_size + pos - if (bytesToCutOff > 0) { - // The cut off data is guaranteed not to contain the needle. - this.emit('info', false, this._lookbehind, 0, bytesToCutOff) - } - this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, - this._lookbehind_size - bytesToCutOff) - this._lookbehind_size -= bytesToCutOff + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - data.copy(this._lookbehind, this._lookbehind_size) - this._lookbehind_size += len + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - this._bufpos = len - return len - } + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; } - pos += (pos >= 0) * this._bufpos + return buf || (0, _stringify.default)(b); +} - // Lookbehind buffer is now empty. We only need to check if the - // needle is in the haystack. - if (data.indexOf(needle, pos) !== -1) { - pos = data.indexOf(needle, pos) - ++this.matches - if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } +var _default = v1; +exports["default"] = _default; - return (this._bufpos = pos + needleLength) - } else { - pos = len - needleLength - } +/***/ }), - // There was no match. If there's trailing haystack data that we cannot - // match yet using the Boyer-Moore-Horspool algorithm (because the trailing - // data is less than the needle size) then match using a modified - // algorithm that starts matching from the beginning instead of the end. - // Whatever trailing data is left after running this algorithm is added to - // the lookbehind buffer. - while ( - pos < len && - ( - data[pos] !== needle[0] || - ( - (Buffer.compare( - data.subarray(pos, pos + len - pos), - needle.subarray(0, len - pos) - ) !== 0) - ) - ) - ) { - ++pos - } - if (pos < len) { - data.copy(this._lookbehind, 0, pos, pos + (len - pos)) - this._lookbehind_size = len - pos - } +/***/ 29411: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Everything until pos is guaranteed not to contain needle data. - if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? 
pos : len) } +"use strict"; - this._bufpos = len - return len -} -SBMH.prototype._sbmh_lookup_char = function (data, pos) { - return (pos < 0) - ? this._lookbehind[this._lookbehind_size + pos] - : data[pos] -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -SBMH.prototype._sbmh_memcmp = function (data, pos, len) { - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } - } - return true -} +var _v = _interopRequireDefault(__nccwpck_require__(74624)); -module.exports = SBMH +var _md = _interopRequireDefault(__nccwpck_require__(84953)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; /***/ }), -/***/ 3438: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 74624: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const WritableStream = (__nccwpck_require__(4492).Writable) -const { inherits } = __nccwpck_require__(7261) -const Dicer = __nccwpck_require__(2856) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; -const MultipartParser = __nccwpck_require__(415) -const UrlencodedParser = __nccwpck_require__(6780) -const parseParams = __nccwpck_require__(4426) +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); -function Busboy (opts) { - if (!(this instanceof Busboy)) { return new Busboy(opts) } +var _parse = _interopRequireDefault(__nccwpck_require__(99645)); - if (typeof opts !== 'object') { - throw new TypeError('Busboy expected an options-Object.') - } - if (typeof opts.headers !== 'object') { - throw new TypeError('Busboy expected an options-Object with headers-attribute.') - } - if (typeof opts.headers['content-type'] !== 'string') { - throw new TypeError('Missing Content-Type-header.') - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - const { - headers, - ...streamOptions - } = opts +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape - this.opts = { - autoDestroy: false, - ...streamOptions + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); } - WritableStream.call(this, this.opts) - this._done = false - this._parser = this.getParserByHeaders(headers) - this._finished = false + return bytes; } -inherits(Busboy, WritableStream) -Busboy.prototype.emit = function (ev) { - if (ev === 'finish') { - if (!this._done) { - this._parser?.end() - return - } else if (this._finished) { - return +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); } - this._finished = true - } - WritableStream.prototype.emit.apply(this, arguments) -} -Busboy.prototype.getParserByHeaders = function (headers) { - const parsed = parseParams(headers['content-type']) + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } - const cfg = { - defCharset: this.opts.defCharset, - fileHwm: this.opts.fileHwm, - headers, - highWaterMark: this.opts.highWaterMark, - isPartAFile: this.opts.isPartAFile, - limits: this.opts.limits, - parsedConType: parsed, - preservePath: this.opts.preservePath - } + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` - if (MultipartParser.detect.test(parsed[0])) { - return new MultipartParser(this, cfg) - } - if (UrlencodedParser.detect.test(parsed[0])) { - return new UrlencodedParser(this, cfg) - } - throw new Error('Unsupported Content-Type.') -} -Busboy.prototype._write = function (chunk, encoding, cb) { - this._parser.write(chunk, cb) -} + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; -module.exports = Busboy -module.exports["default"] = Busboy -module.exports.Busboy = Busboy + if (buf) { + offset = offset || 0; -module.exports.Dicer = Dicer + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + return buf; + } -/***/ }), + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) -/***/ 415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support -// TODO: -// * support 1 nested multipart level -// (see second multipart example here: -// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) -// * support limits.fieldNameSize -// -- this will require modifications to utils.parseParams + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} -const { Readable } = __nccwpck_require__(4492) -const { inherits } = __nccwpck_require__(7261) +/***/ }), -const Dicer = __nccwpck_require__(2856) +/***/ 83424: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -const parseParams = __nccwpck_require__(4426) -const decodeText = __nccwpck_require__(9136) -const basename = __nccwpck_require__(496) -const getLimit = __nccwpck_require__(9692) +"use strict"; -const RE_BOUNDARY = /^boundary$/i -const RE_FIELD = /^form-data$/i -const RE_CHARSET = /^charset$/i -const RE_FILENAME = /^filename$/i -const RE_NAME = /^name$/i -Multipart.detect = /^multipart\/form-data/i -function Multipart (boy, cfg) { - let i - let len - const self = this - let boundary - const limits = cfg.limits - const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) - const parsedConType = cfg.parsedConType || [] - const defCharset = cfg.defCharset || 'utf8' - const preservePath = cfg.preservePath - const fileOpts = { highWaterMark: cfg.fileHwm } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - for (i = 0, len = parsedConType.length; i < len; ++i) { - if (Array.isArray(parsedConType[i]) && - RE_BOUNDARY.test(parsedConType[i][0])) { - boundary = parsedConType[i][1] - break - } - } +var _rng = _interopRequireDefault(__nccwpck_require__(91430)); - function checkFinished () { - if (nends === 0 && finished && !boy._done) { - finished = false - self.end() - } - } +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); - if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) - const filesLimit = getLimit(limits, 'files', Infinity) - const fieldsLimit = getLimit(limits, 'fields', Infinity) - const partsLimit = getLimit(limits, 'parts', Infinity) - const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) - const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) +function v4(options, buf, offset) { + options = options || {}; - let nfiles = 0 - let nfields = 0 - let nends = 0 - let curFile - let curField - let finished = false + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - this._needDrain = false - this._pause = false - this._cb = undefined - this._nparts = 0 - this._boy = boy - const parserCfg = { - boundary, - maxHeaderPairs: headerPairsLimit, - maxHeaderSize: headerSizeLimit, - partHwm: fileOpts.highWaterMark, - highWaterMark: cfg.highWaterMark - } + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - this.parser = new Dicer(parserCfg) - this.parser.on('drain', function () { - self._needDrain = false - if (self._cb && !self._pause) { - const cb = self._cb - self._cb = undefined - cb() - } - }).on('part', function onPart (part) { - if (++self._nparts > partsLimit) { - self.parser.removeListener('part', onPart) - self.parser.on('part', skipPart) - boy.hitPartsLimit = true - boy.emit('partsLimit') - return skipPart(part) - } + if (buf) { + offset = offset || 0; - // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let - // us emit 'end' early since we know the part has ended if we are already - // seeing the next part - if (curField) { - const field = curField - field.emit('end') - field.removeAllListeners('end') + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; } - part.on('header', function (header) { - let contype - let fieldname - let parsed - let charset - let encoding - let filename - let nsize = 0 + return buf; + } - if (header['content-type']) { - parsed = parseParams(header['content-type'][0]) - if (parsed[0]) { - contype = parsed[0].toLowerCase() - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_CHARSET.test(parsed[i][0])) { - charset = parsed[i][1].toLowerCase() - break - } - } - } - } + return (0, _stringify.default)(rnds); +} - if (contype === undefined) { contype = 'text/plain' } - if (charset === undefined) { charset = defCharset } +var _default = v4; +exports["default"] = _default; - if (header['content-disposition']) { - parsed = parseParams(header['content-disposition'][0]) - if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_NAME.test(parsed[i][0])) { - fieldname = parsed[i][1] - } else if (RE_FILENAME.test(parsed[i][0])) { - filename = parsed[i][1] - if (!preservePath) { filename = basename(filename) } - } - } - } else { return skipPart(part) } +/***/ }), - if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } +/***/ 64051: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let onData, - onEnd +"use strict"; - if (isPartAFile(fieldname, contype, filename)) { - // file/binary field - if (nfiles === filesLimit) { - if (!boy.hitFilesLimit) { - boy.hitFilesLimit = true - boy.emit('filesLimit') - } - return skipPart(part) - } - 
++nfiles +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - if (!boy._events.file) { - self.parser._ignore() - return - } +var _v = _interopRequireDefault(__nccwpck_require__(74624)); - ++nends - const file = new FileStream(fileOpts) - curFile = file - file.on('end', function () { - --nends - self._pause = false - checkFinished() - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - }) - file._read = function (n) { - if (!self._pause) { return } - self._pause = false - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - } - boy.emit('file', fieldname, file, filename, encoding, contype) +var _sha = _interopRequireDefault(__nccwpck_require__(77416)); - onData = function (data) { - if ((nsize += data.length) > fileSizeLimit) { - const extralen = fileSizeLimit - nsize + data.length - if (extralen > 0) { file.push(data.slice(0, extralen)) } - file.truncated = true - file.bytesRead = fileSizeLimit - part.removeAllListeners('data') - file.emit('limit') - return - } else if (!file.push(data)) { self._pause = true } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - file.bytesRead = nsize - } +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; - onEnd = function () { - curFile = undefined - file.push(null) - } - } else { - // non-file field - if (nfields === fieldsLimit) { - if (!boy.hitFieldsLimit) { - boy.hitFieldsLimit = true - boy.emit('fieldsLimit') - } - return skipPart(part) - } +/***/ }), - ++nfields - ++nends - let buffer = '' - let truncated = false - curField = part +/***/ 20937: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - onData = function (data) { - if ((nsize += data.length) > fieldSizeLimit) { - const extralen = (fieldSizeLimit - (nsize - data.length)) - buffer += data.toString('binary', 0, extralen) - truncated = true - part.removeAllListeners('data') - } else { buffer += data.toString('binary') } - } +"use strict"; - onEnd = function () { - curField = undefined - if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } - boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) - --nends - checkFinished() - } - } - /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become - broken. Streams2/streams3 is a huge black box of confusion, but - somehow overriding the sync state seems to fix things again (and still - seems to work for previous node versions). - */ - part._readableState.sync = false +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - part.on('data', onData) - part.on('end', onEnd) - }).on('error', function (err) { - if (curFile) { curFile.emit('error', err) } - }) - }).on('error', function (err) { - boy.emit('error', err) - }).on('finish', function () { - finished = true - checkFinished() - }) -} +var _regex = _interopRequireDefault(__nccwpck_require__(94323)); -Multipart.prototype.write = function (chunk, cb) { - const r = this.parser.write(chunk) - if (r && !this._pause) { - cb() - } else { - this._needDrain = !r - this._cb = cb - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); } -Multipart.prototype.end = function () { - const self = this +var _default = validate; +exports["default"] = _default; - if (self.parser.writable) { - self.parser.end() - } else if (!self._boy._done) { - process.nextTick(function () { - self._boy._done = true - self._boy.emit('finish') - }) - } -} +/***/ }), -function skipPart (part) { - part.resume() -} +/***/ 35611: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function FileStream (opts) { - Readable.call(this, opts) +"use strict"; - this.bytesRead = 0 - this.truncated = false -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -inherits(FileStream, Readable) +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); -FileStream.prototype._read = function (n) {} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } -module.exports = Multipart + return parseInt(uuid.substr(14, 1), 16); +} +var _default = version; +exports["default"] = _default; /***/ }), -/***/ 6780: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 81040: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports["default"] = once; +//# sourceMappingURL=index.js.map -const Decoder = __nccwpck_require__(9730) -const decodeText = __nccwpck_require__(9136) -const getLimit = __nccwpck_require__(9692) +/***/ }), -const RE_CHARSET = /^charset$/i +/***/ 61659: +/***/ ((module, exports, __nccwpck_require__) => { -UrlEncoded.detect = /^application\/x-www-form-urlencoded/i -function UrlEncoded (boy, cfg) { - const limits = cfg.limits - const parsedConType = cfg.parsedConType - this.boy = boy +"use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. 
+ */ - this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) - this.fieldsLimit = getLimit(limits, 'fields', Infinity) - let charset - for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var - if (Array.isArray(parsedConType[i]) && - RE_CHARSET.test(parsedConType[i][0])) { - charset = parsedConType[i][1].toLowerCase() - break - } - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - if (charset === undefined) { charset = cfg.defCharset || 'utf8' } +var eventTargetShim = __nccwpck_require__(84697); - this.decoder = new Decoder() - this.charset = charset - this._fields = 0 - this._state = 'key' - this._checkingBytes = true - this._bytesKey = 0 - this._bytesVal = 0 - this._key = '' - this._val = '' - this._keyTrunc = false - this._valTrunc = false - this._hitLimit = false +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); } -UrlEncoded.prototype.write = function (data, cb) { - if (this._fields === this.fieldsLimit) { - if (!this.boy.hitFieldsLimit) { - this.boy.hitFieldsLimit = true - this.boy.emit('fieldsLimit') +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); } - return cb() - } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. 
+ */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. +Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} - let idxeq; let idxamp; let i; let p = 0; const len = data.length +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; - while (p < len) { - if (this._state === 'key') { - idxeq = idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x3D/* = */) { - idxeq = i - break - } else if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesKey } - } +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map - if (idxeq !== undefined) { - // key with assignment - if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } - this._state = 'val' - this._hitLimit = false - this._checkingBytes = true - this._val = '' - this._bytesVal = 0 - this._valTrunc = false - this.decoder.reset() +/***/ }), - p = idxeq + 1 - } else if (idxamp !== undefined) { - // key with no assignment - ++this._fields - let key; const keyTrunc = this._keyTrunc - if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } +/***/ 8348: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() +"use strict"; - if (key.length) { - this.boy.emit('field', decodeText(key, 'binary', this.charset), - '', - keyTrunc, - false) - } +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._keyTrunc = true - } - } else { - if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } - p = len - } - } else { - idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesVal } - } +/***/ }), - if (idxamp !== undefined) { - ++this._fields - if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - this._state = 'key' +/***/ 70694: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() +"use strict"; - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... 
- if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._val === '' && this.fieldSizeLimit === 0) || - (this._bytesVal = this._val.length) === this.fieldSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._valTrunc = true +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const http = __importStar(__nccwpck_require__(13685)); +const https_1 = __nccwpck_require__(95687); +__exportStar(__nccwpck_require__(8348), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. 
+ incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. + if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 
443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; } - } else { - if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } - p = len - } } - } - cb() -} - -UrlEncoded.prototype.end = function () { - if (this.boy._done) { return } - - if (this._state === 'key' && this._key.length > 0) { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - '', - this._keyTrunc, - false) - } else if (this._state === 'val') { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - } - this.boy._done = true - this.boy.emit('finish') -} - -module.exports = UrlEncoded - - -/***/ }), - -/***/ 9730: -/***/ ((module) => { - -"use strict"; - - -const RE_PLUS = /\+/g - -const HEX = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -] - -function Decoder () { - this.buffer = undefined -} -Decoder.prototype.write = function (str) { - // Replace '+' with ' ' before decoding - str = str.replace(RE_PLUS, ' ') - let res = '' - let i = 0; let p = 0; const len = str.length - for (; i < len; ++i) { - if (this.buffer !== undefined) { - if (!HEX[str.charCodeAt(i)]) { - res += '%' + this.buffer - this.buffer = undefined - --i // retry character - } else { - this.buffer += str[i] - ++p - if (this.buffer.length === 2) { - res += String.fromCharCode(parseInt(this.buffer, 16)) - this.buffer = undefined + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; } - } - } else if (str[i] === '%') { - if (i > p) { - res += str.substring(p, i) - p = i - } - this.buffer = '' - ++p } - } - if (p < len && this.buffer === undefined) { res += str.substring(p) } - return res -} -Decoder.prototype.reset = function () { - this.buffer = undefined } - -module.exports = Decoder - +exports.Agent = Agent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 496: +/***/ 61546: /***/ ((module) => { "use strict"; -module.exports = function basename (path) { - if (typeof path !== 'string') { return '' } - for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var - switch (path.charCodeAt(i)) { - case 0x2F: // '/' - case 0x5C: // '\' - path = path.slice(i + 1) - return (path === '..' || path === '.' ? '' : path) - } - } - return (path === '..' || path === '.' ? 
'' : path) -} - - -/***/ }), - -/***/ 9136: -/***/ ((module) => { +const arrify = value => { + if (value === null || value === undefined) { + return []; + } -"use strict"; + if (Array.isArray(value)) { + return value; + } + if (typeof value === 'string') { + return [value]; + } -// Node has always utf-8 -const utf8Decoder = new TextDecoder('utf-8') -const textDecoders = new Map([ - ['utf-8', utf8Decoder], - ['utf8', utf8Decoder] -]) + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } -function decodeText (text, textEncoding, destEncoding) { - if (text) { - if (textDecoders.has(destEncoding)) { - try { - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } - } else { - try { - textDecoders.set(destEncoding, new TextDecoder(destEncoding)) - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } - } - } - return text -} + return [value]; +}; -module.exports = decodeText +module.exports = arrify; /***/ }), -/***/ 9692: -/***/ ((module) => { +/***/ 33415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +// Packages +var retrier = __nccwpck_require__(71604); +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; -module.exports = function getLimit (limits, name, defaultLimit) { - if ( - !limits || - limits[name] === undefined || - limits[name] === null - ) { return defaultLimit } + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } - if ( - typeof limits[name] !== 'number' || - isNaN(limits[name]) - ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + op = retrier.operation(options); - return limits[name] -} + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + function bail(err) { + reject(err || new Error('Aborted')); + } -/***/ }), + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } -/***/ 4426: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } -"use strict"; + function runAttempt(num) { + var val; + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } -const decodeText = __nccwpck_require__(9136) + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } -const RE_ENCODED = /%([a-fA-F0-9]{2})/g + op.attempt(runAttempt); + } -function encodedReplacer (match, byte) { - return String.fromCharCode(parseInt(byte, 16)) + return new Promise(run); } -function parseParams (str) { - const res = [] - let state = 'key' - let charset = '' - let inquote = false - let escaping = false - let p = 0 - let tmp = '' +module.exports = retry; - for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var - const char = str[i] - if (char === '\\' && inquote) { - if (escaping) { escaping = false } else { - escaping = true - continue - } - } else if (char === '"') { - if (!escaping) { - if (inquote) { - inquote = false - state = 'key' - } else { inquote = true } - continue - } else { escaping = false } - } else { - if (escaping && inquote) { tmp += '\\' } - escaping = false - if ((state === 'charset' || state === 'lang') && char === "'") { - if (state === 'charset') { - state = 'lang' - charset = 
tmp.substring(1) - } else { state = 'value' } - tmp = '' - continue - } else if (state === 'key' && - (char === '*' || char === '=') && - res.length) { - if (char === '*') { state = 'charset' } else { state = 'value' } - res[p] = [tmp, undefined] - tmp = '' - continue - } else if (!inquote && char === ';') { - state = 'key' - if (charset) { - if (tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } - charset = '' - } else if (tmp.length) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } - tmp = '' - ++p - continue - } else if (!inquote && (char === ' ' || char === '\t')) { continue } - } - tmp += char - } - if (charset && tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } else if (tmp) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { - if (tmp) { res[p] = tmp } - } else { res[p][1] = tmp } +/***/ }), - return res -} +/***/ 14812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = parseParams +module.exports = +{ + parallel : __nccwpck_require__(8210), + serial : __nccwpck_require__(50445), + serialOrdered : __nccwpck_require__(3578) +}; /***/ }), -/***/ 6412: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1700: +/***/ ((module) => { -"use strict"; +// API +module.exports = abort; -/*! - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ResourceStream = exports.paginator = exports.Paginator = void 0; -/*! - * @module common/paginator - */ -const arrify = __nccwpck_require__(1546); -const extend = __nccwpck_require__(8171); -const resource_stream_1 = __nccwpck_require__(2199); -Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); -/*! Developer Documentation - * - * paginator is used to auto-paginate `nextQuery` methods as well as - * streamifying them. - * - * Before: - * - * search.query('done=true', function(err, results, nextQuery) { - * search.query(nextQuery, function(err, results, nextQuery) {}); - * }); - * - * After: - * - * search.query('done=true', function(err, results) {}); +/** + * Aborts leftover active jobs * - * Methods to extend should be written to accept callbacks and return a - * `nextQuery`. + * @param {object} state - current state object */ -class Paginator { - /** - * Cache the original method, then overwrite it on the Class's prototype. - * - * @param {function} Class - The parent class of the methods to extend. - * @param {string|string[]} methodNames - Name(s) of the methods to extend. 
- */ - // tslint:disable-next-line:variable-name - extend(Class, methodNames) { - methodNames = arrify(methodNames); - methodNames.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - // map the original method to a private member - Class.prototype[methodName + '_'] = originalMethod; - // overwrite the original to auto-paginate - /* eslint-disable @typescript-eslint/no-explicit-any */ - Class.prototype[methodName] = function (...args) { - const parsedArguments = paginator.parseArguments_(args); - return paginator.run_(parsedArguments, originalMethod.bind(this)); - }; - }); - } - /** - * Wraps paginated API calls in a readable object stream. - * - * This method simply calls the nextQuery recursively, emitting results to a - * stream. The stream ends when `nextQuery` is null. - * - * `maxResults` will act as a cap for how many results are fetched and emitted - * to the stream. - * - * @param {string} methodName - Name of the method to streamify. - * @return {function} - Wrapped function. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - streamify(methodName) { - return function ( - /* eslint-disable @typescript-eslint/no-explicit-any */ - ...args) { - const parsedArguments = paginator.parseArguments_(args); - const originalMethod = this[methodName + '_'] || this[methodName]; - return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); - }; - } - /** - * Parse a pseudo-array `arguments` for a query and callback. - * - * @param {array} args - The original `arguments` pseduo-array that the original - * method received. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - parseArguments_(args) { - let query; - let autoPaginate = true; - let maxApiCalls = -1; - let maxResults = -1; - let callback; - const firstArgument = args[0]; - const lastArgument = args[args.length - 1]; - if (typeof firstArgument === 'function') { - callback = firstArgument; - } - else { - query = firstArgument; - } - if (typeof lastArgument === 'function') { - callback = lastArgument; - } - if (typeof query === 'object') { - query = extend(true, {}, query); - // Check if the user only asked for a certain amount of results. - if (query.maxResults && typeof query.maxResults === 'number') { - // `maxResults` is used API-wide. - maxResults = query.maxResults; - } - else if (typeof query.pageSize === 'number') { - // `pageSize` is Pub/Sub's `maxResults`. - maxResults = query.pageSize; - } - if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { - maxApiCalls = query.maxApiCalls; - delete query.maxApiCalls; - } - // maxResults is the user specified limit. - if (maxResults !== -1 || query.autoPaginate === false) { - autoPaginate = false; - } - } - const parsedArguments = { - query: query || {}, - autoPaginate, - maxApiCalls, - maxResults, - callback, - }; - parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); - delete parsedArguments.streamOptions.autoPaginate; - delete parsedArguments.streamOptions.maxResults; - delete parsedArguments.streamOptions.pageSize; - return parsedArguments; - } - /** - * This simply checks to see if `autoPaginate` is set or not, if it's true - * then we buffer all results, otherwise simply call the original method. - * - * @param {array} parsedArguments - Parsed arguments from the original method - * call. - * @param {object=|string=} parsedArguments.query - Query object. This is most - * commonly an object, but to make the API more simple, it can also be a - * string in some places. 
- * @param {function=} parsedArguments.callback - Callback function. - * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. - * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. - * @param {number} parsedArguments.maxResults - Maximum results to return. - * @param {function} originalMethod - The cached method that accepts a callback - * and returns `nextQuery` to receive more results. - */ - run_(parsedArguments, originalMethod) { - const query = parsedArguments.query; - const callback = parsedArguments.callback; - if (!parsedArguments.autoPaginate) { - return originalMethod(query, callback); - } - const results = new Array(); - let otherArgs = []; - const promise = new Promise((resolve, reject) => { - const stream = paginator.runAsStream_(parsedArguments, originalMethod); - stream - .on('error', reject) - .on('data', (data) => results.push(data)) - .on('end', () => { - otherArgs = stream._otherArgs || []; - resolve(results); - }); - }); - if (!callback) { - return promise.then(results => [results, query, ...otherArgs]); - } - promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); - } - /** - * This method simply calls the nextQuery recursively, emitting results to a - * stream. The stream ends when `nextQuery` is null. - * - * `maxResults` will act as a cap for how many results are fetched and emitted - * to the stream. - * - * @param {object=|string=} parsedArguments.query - Query object. This is most - * commonly an object, but to make the API more simple, it can also be a - * string in some places. - * @param {function=} parsedArguments.callback - Callback function. - * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. - * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. - * @param {number} parsedArguments.maxResults - Maximum results to return. - * @param {function} originalMethod - The cached method that accepts a callback - * and returns `nextQuery` to receive more results. - * @return {stream} - Readable object stream. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - runAsStream_(parsedArguments, originalMethod) { - return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); - } +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; } -exports.Paginator = Paginator; -const paginator = new Paginator(); -exports.paginator = paginator; -//# sourceMappingURL=index.js.map + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} + /***/ }), -/***/ 2199: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 72794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var defer = __nccwpck_require__(15295); -/*! - * Copyright 2019 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ResourceStream = void 0; -const stream_1 = __nccwpck_require__(2781); -class ResourceStream extends stream_1.Transform { - constructor(args, requestFn) { - const options = Object.assign({ objectMode: true }, args.streamOptions); - super(options); - this._ended = false; - this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; - this._nextQuery = args.query; - this._reading = false; - this._requestFn = requestFn; - this._requestsMade = 0; - this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; - this._otherArgs = []; - } - /* eslint-disable @typescript-eslint/no-explicit-any */ - end(...args) { - this._ended = true; - return super.end(...args); +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); } - _read() { - if (this._reading) { - return; - } - this._reading = true; - // Wrap in a try/catch to catch input linting errors, e.g. - // an invalid BigQuery query. These errors are thrown in an - // async fashion, which makes them un-catchable by the user. - try { - this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { - if (err) { - this.destroy(err); - return; - } - this._otherArgs = otherArgs; - this._nextQuery = nextQuery; - if (this._resultsToSend !== Infinity) { - results = results.splice(0, this._resultsToSend); - this._resultsToSend -= results.length; - } - let more = true; - for (const result of results) { - if (this._ended) { - break; - } - more = this.push(result); - } - const isFinished = !this._nextQuery || this._resultsToSend < 1; - const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; - if (isFinished || madeMaxCalls) { - this.end(); - } - if (more && !this._ended) { - setImmediate(() => this._read()); - } - this._reading = false; - }); - } - catch (e) { - this.destroy(e); - } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); } + }; } -exports.ResourceStream = ResourceStream; -//# sourceMappingURL=resource-stream.js.map + /***/ }), -/***/ 3497: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 15295: +/***/ ((module) => { -"use strict"; +module.exports = defer; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; -const stream_1 = __nccwpck_require__(2781); -// Copyright 2014 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. /** - * Populate the `{{projectId}}` placeholder. + * Runs provided function on next iteration of the event loop * - * @throws {Error} If a projectId is required, but one is not provided. + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} + + +/***/ }), + +/***/ 9023: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var async = __nccwpck_require__(72794) + , abort = __nccwpck_require__(1700) + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object * - * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. - * @param {string} projectId - A projectId. If not provided - * @return {*} - The original argument with all placeholders populated. + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function replaceProjectIdToken(value, projectId) { - if (Array.isArray(value)) { - value = value.map(v => replaceProjectIdToken(v, projectId)); +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; } - if (value !== null && - typeof value === 'object' && - !(value instanceof Buffer) && - !(value instanceof stream_1.Stream) && - typeof value.hasOwnProperty === 'function') { - for (const opt in value) { - // eslint-disable-next-line no-prototype-builtins - if (value.hasOwnProperty(opt)) { - value[opt] = replaceProjectIdToken(value[opt], projectId); - } - } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); } - if (typeof value === 'string' && - value.indexOf('{{projectId}}') > -1) { - if (!projectId || projectId === '{{projectId}}') { - throw new MissingProjectIdError(); - } - value = value.replace(/{{projectId}}/g, projectId); + else + { + state.results[key] = output; } - return value; + + // return salvaged results + callback(error, state.results); + }); } -exports.replaceProjectIdToken = replaceProjectIdToken; + /** - * Custom error type for missing project ID errors. 
+ * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else */ -class MissingProjectIdError extends Error { - constructor() { - super(...arguments); - this.message = `Sorry, we cannot connect to Cloud Services without a project - ID. You may specify one with an environment variable named - "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); - } +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; } -exports.MissingProjectIdError = MissingProjectIdError; -//# sourceMappingURL=index.js.map + /***/ }), -/***/ 9203: -/***/ ((__unused_webpack_module, exports) => { +/***/ 42474: +/***/ ((module) => { -"use strict"; +// API +module.exports = state; -/* eslint-disable prefer-rest-params */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; /** - * Wraps a callback style function to conditionally return a promise. + * Creates initial state object + * for iteration over list * - * @param {function} originalMethod - The method to promisify. - * @param {object=} options - Promise options. - * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. - * @return {function} wrapped + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object */ -function promisify(originalMethod, options) { - if (originalMethod.promisified_) { - return originalMethod; +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length } - options = options || {}; - const slice = Array.prototype.slice; - // tslint:disable-next-line:no-any - const wrapper = function () { - let last; - for (last = arguments.length - 1; last >= 0; last--) { - const arg = arguments[last]; - if (typeof arg === 'undefined') { - continue; // skip trailing undefined. - } - if (typeof arg !== 'function') { - break; // non-callback last argument found. - } - return originalMethod.apply(this, arguments); - } - // peel trailing undefined. - const args = slice.call(arguments, 0, last + 1); - // tslint:disable-next-line:variable-name - let PromiseCtor = Promise; - // Because dedupe will likely create a single install of - // @google-cloud/common to be shared amongst all modules, we need to - // localize it at the Service level. 
- if (this && this.Promise) { - PromiseCtor = this.Promise; - } - return new PromiseCtor((resolve, reject) => { - // tslint:disable-next-line:no-any - args.push((...args) => { - const callbackArgs = slice.call(args); - const err = callbackArgs.shift(); - if (err) { - return reject(err); - } - if (options.singular && callbackArgs.length === 1) { - resolve(callbackArgs[0]); - } - else { - resolve(callbackArgs); - } - }); - originalMethod.apply(this, args); - }); - }; - wrapper.promisified_ = true; - return wrapper; + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} + + +/***/ }), + +/***/ 37942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var abort = __nccwpck_require__(1700) + , async = __nccwpck_require__(72794) + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} + + +/***/ }), + +/***/ 8210: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} + + +/***/ }), + +/***/ 50445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var serialOrdered = __nccwpck_require__(3578); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} + + +/***/ }), + +/***/ 3578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in 
series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); } -exports.promisify = promisify; -/** - * Promisifies certain Class methods. This will not promisify private or - * streaming methods. - * - * @param {module:common/service} Class - Service class. - * @param {object=} options - Configuration object. + +/* + * -- Sort methods */ -// tslint:disable-next-line:variable-name -function promisifyAll(Class, options) { - const exclude = (options && options.exclude) || []; - const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); - const methods = ownPropertyNames.filter(methodName => { - // clang-format off - return (!exclude.includes(methodName) && - typeof Class.prototype[methodName] === 'function' && // is it a function? - !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? - ); - // clang-format on - }); - methods.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - if (!originalMethod.promisified_) { - Class.prototype[methodName] = exports.promisify(originalMethod, options); - } - }); -} -exports.promisifyAll = promisifyAll; + /** - * Wraps a promisy type function to conditionally call a callback function. + * sort helper to sort array elements in ascending order * - * @param {function} originalMethod - The method to callbackify. - * @param {object=} options - Callback options. - * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. - * @return {function} wrapped + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function callbackify(originalMethod) { - if (originalMethod.callbackified_) { - return originalMethod; - } - // tslint:disable-next-line:no-any - const wrapper = function () { - if (typeof arguments[arguments.length - 1] !== 'function') { - return originalMethod.apply(this, arguments); - } - const cb = Array.prototype.pop.call(arguments); - originalMethod.apply(this, arguments).then( - // tslint:disable-next-line:no-any - (res) => { - res = Array.isArray(res) ? res : [res]; - cb(null, ...res); - }, (err) => cb(err)); - }; - wrapper.callbackified_ = true; - return wrapper; +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; } -exports.callbackify = callbackify; + /** - * Callbackifies certain Class methods. This will not callbackify private or - * streaming methods. + * sort helper to sort array elements in descending order * - * @param {module:common/service} Class - Service class. - * @param {object=} options - Configuration object. 
+ * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function callbackifyAll( -// tslint:disable-next-line:variable-name -Class, options) { - const exclude = (options && options.exclude) || []; - const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); - const methods = ownPropertyNames.filter(methodName => { - // clang-format off - return (!exclude.includes(methodName) && - typeof Class.prototype[methodName] === 'function' && // is it a function? - !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? - ); - // clang-format on - }); - methods.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - if (!originalMethod.callbackified_) { - Class.prototype[methodName] = exports.callbackify(originalMethod); - } - }); +function descending(a, b) +{ + return -1 * ascending(a, b); } -exports.callbackifyAll = callbackifyAll; -//# sourceMappingURL=index.js.map + /***/ }), -/***/ 4458: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 91403: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var CombinedStream = __nccwpck_require__(85443); +var util = __nccwpck_require__(73837); +var path = __nccwpck_require__(71017); +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var parseUrl = (__nccwpck_require__(57310).parse); +var fs = __nccwpck_require__(57147); +var Stream = (__nccwpck_require__(12781).Stream); +var mime = __nccwpck_require__(43583); +var asynckit = __nccwpck_require__(14812); +var populate = __nccwpck_require__(47027); +// Public API +module.exports = FormData; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? 
+ this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); } -})); +}; + +FormData.prototype._lengthRetriever = function(value, callback) { -var _v = _interopRequireDefault(__nccwpck_require__(3542)); + if (value.hasOwnProperty('fd')) { -var _v2 = _interopRequireDefault(__nccwpck_require__(9411)); + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { -var _v3 = _interopRequireDefault(__nccwpck_require__(3424)); + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); -var _v4 = _interopRequireDefault(__nccwpck_require__(4051)); + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { -var _nil = _interopRequireDefault(__nccwpck_require__(6570)); + var fileSize; -var _version = _interopRequireDefault(__nccwpck_require__(5611)); + if (err) { + callback(err); + return; + } -var _validate = _interopRequireDefault(__nccwpck_require__(937)); + // update final size based on the range options + fileSize = stat.size - (value.start ? value.start : 0); + callback(null, fileSize); + }); + } -var _stringify = _interopRequireDefault(__nccwpck_require__(2122)); + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); -var _parse = _interopRequireDefault(__nccwpck_require__(9645)); + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // something else + } else { + callback('Unknown stream'); + } +}; -/***/ }), +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } -/***/ 4953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); -"use strict"; + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + // allow custom headers. 
+ if (typeof options.header == 'object') { + populate(headers, options.header); + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + // skip nullish headers. + if (header == null) { + continue; + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } } - return _crypto.default.createHash('md5').update(bytes).digest(); -} + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; -var _default = md5; -exports["default"] = _default; +FormData.prototype._getContentDisposition = function(value, options) { -/***/ }), + var filename + , contentDisposition + ; -/***/ 6570: -/***/ ((__unused_webpack_module, exports) => { + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } -"use strict"; + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + return contentDisposition; +}; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; +FormData.prototype._getContentType = function(value, options) { -/***/ }), + // use custom content-type above all + var contentType = options.contentType; -/***/ 9645: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } -"use strict"; + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } -var _validate = _interopRequireDefault(__nccwpck_require__(937)); + // fallback to the default content type if `value` is not simple value + if (!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + return contentType; +}; -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + next(footer); + }.bind(this); +}; - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} + return formHeaders; +}; -var _default = parse; -exports["default"] = _default; +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; -/***/ }), +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } -/***/ 4323: -/***/ ((__unused_webpack_module, exports) => { + return this._boundary; +}; -"use strict"; +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; -/***/ }), +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. 
+ var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } -/***/ 1430: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this._boundary = boundary; +}; -"use strict"; +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. + // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + return knownLength; +}; -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } -let poolPtr = rnds8Pool.length; + return hasKnownLength; +}; -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; - poolPtr = 0; + if (this._streams.length) { + knownLength += this._lastBoundary().length; } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } -/***/ }), + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } -/***/ 7416: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + values.forEach(function(length) { + knownLength += length; + }); -"use strict"; + cb(null, knownLength); + }); +}; +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + // use custom params + } else { -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } } - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); -/***/ 2122: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } -"use strict"; + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + // add content length + if (length) { + request.setHeader('Content-Length', length); + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + this.pipe(request); + if (cb) { + var onResponse; -var _validate = _interopRequireDefault(__nccwpck_require__(937)); + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return cb.call(this, error, responce); + }; -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; + onResponse = callback.bind(this, null); -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields + return request; +}; - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); } +}; - return uuid; -} +FormData.prototype.toString = function () { + return '[object FormData]'; +}; -var _default = stringify; -exports["default"] = _default; /***/ }), -/***/ 3542: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +/***/ 47027: +/***/ ((module) => { +// populates missing values +module.exports = function(dst, src) { -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); -var _rng = _interopRequireDefault(__nccwpck_require__(1430)); + return dst; +}; -var _stringify = _interopRequireDefault(__nccwpck_require__(2122)); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ }), -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; +/***/ 9417: +/***/ ((module) => { -let _clockseq; // Previous uuid creation time +"use strict"; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + var r = range(a, b, str); -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. 
- - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? ai : bi; + } - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + if (begs.length) { + result = [ left, right ]; + } } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` + return result; +} - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` +/***/ }), - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version +/***/ 26463: +/***/ ((__unused_webpack_module, exports) => { - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) +"use strict"; - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - b[i++] = clockseq & 0xff; // `node` +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array - return buf || (0, _stringify.default)(b); +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i } -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 9411: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 +function getLens (b64) { + var len = b64.length -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (len % 4 > 0) { + throw new Error('Invalid string. 
Length must be a multiple of 4') + } -var _v = _interopRequireDefault(__nccwpck_require__(4624)); + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len -var _md = _interopRequireDefault(__nccwpck_require__(4953)); + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return [validLen, placeHoldersLen] +} -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} -/***/ }), +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} -/***/ 4624: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] -"use strict"; + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + var curByte = 0 -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen -var _stringify = _interopRequireDefault(__nccwpck_require__(2122)); + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } -var _parse = _interopRequireDefault(__nccwpck_require__(9645)); + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape + return arr +} - const bytes = []; +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) } - - return bytes; + return output.join('') } -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` + return parts.join('') +} - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; +/***/ }), - if (buf) { - offset = offset || 0; +/***/ 83682: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } +var register = __nccwpck_require__(44670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); - return buf; - } +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? 
[state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support +function HookCollection() { + var state = { + registry: {}, + }; + var hook = register.bind(null, state); + bindApi(hook, state); - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; + return hook; +} + +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); } +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); + +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; + + /***/ }), -/***/ 3424: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5549: +/***/ ((module) => { -"use strict"; +module.exports = addHook; +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } -var _rng = _interopRequireDefault(__nccwpck_require__(1430)); + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } -var _stringify = _interopRequireDefault(__nccwpck_require__(2122)); + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} -function v4(options, buf, offset) { - options = options || {}; - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +/***/ }), +/***/ 44670: +/***/ ((module) => { - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +module.exports = register; - if (buf) { - offset = offset || 0; +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } + if (!options) { + options = {}; + } - return buf; + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); } - return (0, _stringify.default)(rnds); + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); } -var _default = v4; -exports["default"] = _default; /***/ }), -/***/ 4051: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +/***/ 6819: +/***/ ((module) => { +module.exports = removeHook; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } -var _v = _interopRequireDefault(__nccwpck_require__(4624)); + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); -var _sha = _interopRequireDefault(__nccwpck_require__(7416)); + if (index === -1) { + return; + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + state.registry[name].splice(index, 1); +} -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; /***/ }), -/***/ 937: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 87558: +/***/ (function(module) { -"use strict"; +;(function (globalObject) { + 'use strict'; +/* + * bignumber.js v9.1.2 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2022 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _regex = _interopRequireDefault(__nccwpck_require__(4323)); + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, -var _default = validate; -exports["default"] = _default; + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 -/***/ }), -/***/ 5611: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), -"use strict"; + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _validate = _interopRequireDefault(__nccwpck_require__(937)); + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. 
If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 - return parseInt(uuid.substr(14, 1), 16); -} + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] -var _default = version; -exports["default"] = _default; + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX -/***/ }), + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX -/***/ 1040: -/***/ ((__unused_webpack_module, exports) => { + // RANGE : [MIN_EXP, MAX_EXP] -"use strict"; + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX -Object.defineProperty(exports, "__esModule", ({ value: true })); -function once(emitter, name, { signal } = {}) { - return new Promise((resolve, reject) => { - function cleanup() { - signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); - emitter.removeListener(name, onEvent); - emitter.removeListener('error', onError); - } - function onEvent(...args) { - cleanup(); - resolve(args); - } - function onError(err) { - cleanup(); - reject(err); - } - signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup); - emitter.on(name, onEvent); - emitter.on('error', onError); - }); -} -exports["default"] = once; -//# sourceMappingURL=index.js.map + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX -/***/ }), + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false -/***/ 1659: -/***/ ((module, exports, __nccwpck_require__) => { + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 -"use strict"; -/** - * @author Toru Nagashima - * See LICENSE file in root directory for full license. - */ + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + // The format specification used by the BigNumber.prototype.toFormat method. 
+ FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, -Object.defineProperty(exports, "__esModule", ({ value: true })); + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; -var eventTargetShim = __nccwpck_require__(4697); -/** - * The signal class. - * @see https://dom.spec.whatwg.org/#abortsignal - */ -class AbortSignal extends eventTargetShim.EventTarget { - /** - * AbortSignal cannot be constructed directly. - */ - constructor() { - super(); - throw new TypeError("AbortSignal cannot be constructed directly"); - } - /** - * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. - */ - get aborted() { - const aborted = abortedFlags.get(this); - if (typeof aborted !== "boolean") { - throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); - } - return aborted; - } -} -eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); -/** - * Create an AbortSignal object. - */ -function createAbortSignal() { - const signal = Object.create(AbortSignal.prototype); - eventTargetShim.EventTarget.call(signal); - abortedFlags.set(signal, false); - return signal; -} -/** - * Abort a given signal. - */ -function abortSignal(signal) { - if (abortedFlags.get(signal) !== false) { - return; - } - abortedFlags.set(signal, true); - signal.dispatchEvent({ type: "abort" }); -} -/** - * Aborted flag for each instances. - */ -const abortedFlags = new WeakMap(); -// Properties should be enumerable. -Object.defineProperties(AbortSignal.prototype, { - aborted: { enumerable: true }, -}); -// `toString()` should return `"[object AbortSignal]"` -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortSignal", - }); -} + //------------------------------------------------------------------------------------------ -/** - * The AbortController. - * @see https://dom.spec.whatwg.org/#abortcontroller - */ -class AbortController { - /** - * Initialize this controller. - */ - constructor() { - signals.set(this, createAbortSignal()); - } - /** - * Returns the `AbortSignal` object associated with this object. - */ - get signal() { - return getSignal(this); - } - /** - * Abort and signal to any observers that the associated activity is to be aborted. + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. */ - abort() { - abortSignal(getSignal(this)); - } -} -/** - * Associated signals. - */ -const signals = new WeakMap(); -/** - * Get the associated signal of a given controller. - */ -function getSignal(controller) { - const signal = signals.get(controller); - if (signal == null) { - throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
"null" : typeof controller}`); - } - return signal; -} -// Properties should be enumerable. -Object.defineProperties(AbortController.prototype, { - signal: { enumerable: true }, - abort: { enumerable: true }, -}); -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortController", - }); -} + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; -exports.AbortController = AbortController; -exports.AbortSignal = AbortSignal; -exports["default"] = AbortController; + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); -module.exports = AbortController -module.exports.AbortController = module.exports["default"] = AbortController -module.exports.AbortSignal = AbortSignal -//# sourceMappingURL=abort-controller.js.map + if (b == null) { + if (v && v._isBigNumber === true) { + x.s = v.s; -/***/ }), + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } -/***/ 8348: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + return; + } -"use strict"; + if ((isNum = typeof v == 'number') && v * 0 == 0) { -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.req = exports.json = exports.toBuffer = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -async function toBuffer(stream) { - let length = 0; - const chunks = []; - for await (const chunk of stream) { - length += chunk.length; - chunks.push(chunk); - } - return Buffer.concat(chunks, length); -} -exports.toBuffer = toBuffer; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function json(stream) { - const buf = await toBuffer(stream); - const str = buf.toString('utf8'); - try { - return JSON.parse(str); - } - catch (_err) { - const err = _err; - err.message += ` (input: ${str})`; - throw err; - } -} -exports.json = json; -function req(url, opts = {}) { - const href = typeof url === 'string' ? url : url.href; - const req = (href.startsWith('https:') ? 
https : http).request(url, opts); - const promise = new Promise((resolve, reject) => { - req - .once('response', resolve) - .once('error', reject) - .end(); - }); - req.then = promise.then.bind(promise); - return req; -} -exports.req = req; -//# sourceMappingURL=helpers.js.map + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } -/***/ }), + return; + } -/***/ 694: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + str = String(v); + } else { -"use strict"; + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Agent = void 0; -const net = __importStar(__nccwpck_require__(1808)); -const http = __importStar(__nccwpck_require__(3685)); -const https_1 = __nccwpck_require__(5687); -__exportStar(__nccwpck_require__(8348), exports); -const INTERNAL = Symbol('AgentBaseInternalState'); -class Agent extends http.Agent { - constructor(opts) { - super(opts); - this[INTERNAL] = {}; - } - /** - * Determine whether this is an `http` or `https` request. - */ - isSecureEndpoint(options) { - if (options) { - // First check the `secureEndpoint` property explicitly, since this - // means that a parent `Agent` is "passing through" to this instance. - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (typeof options.secureEndpoint === 'boolean') { - return options.secureEndpoint; - } - // If no explicit `secure` endpoint, check if `protocol` property is - // set. This will usually be the case since using a full string URL - // or `URL` instance should be the most common usage. - if (typeof options.protocol === 'string') { - return options.protocol === 'https:'; - } - } - // Finally, if no `protocol` property was set, then fall back to - // checking the stack trace of the current call stack, and try to - // detect the "https" module. 
- const { stack } = new Error(); - if (typeof stack !== 'string') - return false; - return stack - .split('\n') - .some((l) => l.indexOf('(https.js:') !== -1 || - l.indexOf('node:https:') !== -1); - } - // In order to support async signatures in `connect()` and Node's native - // connection pooling in `http.Agent`, the array of sockets for each origin - // has to be updated synchronously. This is so the length of the array is - // accurate when `addRequest()` is next called. We achieve this by creating a - // fake socket and adding it to `sockets[origin]` and incrementing - // `totalSocketCount`. - incrementSockets(name) { - // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no - // need to create a fake socket because Node.js native connection pooling - // will never be invoked. - if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { - return null; - } - // All instances of `sockets` are expected TypeScript errors. The - // alternative is to add it as a private property of this class but that - // will break TypeScript subclassing. - if (!this.sockets[name]) { - // @ts-expect-error `sockets` is readonly in `@types/node` - this.sockets[name] = []; + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; } - const fakeSocket = new net.Socket({ writable: false }); - this.sockets[name].push(fakeSocket); - // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` - this.totalSocketCount++; - return fakeSocket; - } - decrementSockets(name, socket) { - if (!this.sockets[name] || socket === null) { - return; + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. + if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; } - const sockets = this.sockets[name]; - const index = sockets.indexOf(socket); - if (index !== -1) { - sockets.splice(index, 1); - // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` - this.totalSocketCount--; - if (sockets.length === 0) { - // @ts-expect-error `sockets` is readonly in `@types/node` - delete this.sockets[name]; - } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); } - } - // In order to properly update the socket pool, we need to call `getName()` on - // the core `https.Agent` if it is a secureEndpoint. - getName(options) { - const secureEndpoint = typeof options.secureEndpoint === 'boolean' - ? options.secureEndpoint - : this.isSecureEndpoint(options); - if (secureEndpoint) { - // @ts-expect-error `getName()` isn't defined in `@types/node` - return https_1.Agent.prototype.getName.call(this, options); + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? 
(str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; } - // @ts-expect-error `getName()` isn't defined in `@types/node` - return super.getName(options); - } - createSocket(req, options, cb) { - const connectOpts = { - ...options, - secureEndpoint: this.isSecureEndpoint(options), - }; - const name = this.getName(connectOpts); - const fakeSocket = this.incrementSockets(name); - Promise.resolve() - .then(() => this.connect(req, connectOpts)) - .then((socket) => { - this.decrementSockets(name, fakeSocket); - if (socket instanceof http.Agent) { - // @ts-expect-error `addRequest()` isn't defined in `@types/node` - return socket.addRequest(req, connectOpts); + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } } - this[INTERNAL].currentSocket = socket; - // @ts-expect-error `createSocket()` isn't defined in `@types/node` - super.createSocket(req, options, cb); - }, (err) => { - this.decrementSockets(name, fakeSocket); - cb(err); - }); - } - createConnection() { - const socket = this[INTERNAL].currentSocket; - this[INTERNAL].currentSocket = undefined; - if (!socket) { - throw new Error('No socket was returned in the `connect()` function'); - } - return socket; - } - get defaultPort() { - return (this[INTERNAL].defaultPort ?? - (this.protocol === 'https:' ? 443 : 80)); - } - set defaultPort(v) { - if (this[INTERNAL]) { - this[INTERNAL].defaultPort = v; - } - } - get protocol() { - return (this[INTERNAL].protocol ?? - (this.isSecureEndpoint() ? 'https:' : 'http:')); - } - set protocol(v) { - if (this[INTERNAL]) { - this[INTERNAL].protocol = v; + + return parseNumeric(x, String(v), isNum, b); + } } - } -} -exports.Agent = Agent; -//# sourceMappingURL=index.js.map -/***/ }), + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); -/***/ 1546: -/***/ ((module) => { + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } -"use strict"; + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); -const arrify = value => { - if (value === null || value === undefined) { - return []; - } + if (str = str.slice(i, ++len)) { + len -= i; - if (Array.isArray(value)) { - return value; - } + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } - if (typeof value === 'string') { - return [value]; - } + // Overflow? 
+ if ((e = e - i - 1) > MAX_EXP) { - if (typeof value[Symbol.iterator] === 'function') { - return [...value]; - } + // Infinity. + x.c = x.e = null; - return [value]; -}; + // Underflow? + } else if (e < MIN_EXP) { -module.exports = arrify; + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + // Transform base -/***/ }), + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 -/***/ 3415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); -// Packages -var retrier = __nccwpck_require__(1604); + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } -function retry(fn, opts) { - function run(resolve, reject) { - var options = opts || {}; - var op; + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } - // Default `randomize` to true - if (!('randomize' in options)) { - options.randomize = true; + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. + x.c = [x.e = 0]; + } } - op = retrier.operation(options); - // We allow the user to abort retrying - // this makes sense in the cases where - // knowledge is obtained that retrying - // would be futile (e.g.: auth errors) + // CONSTRUCTOR PROPERTIES - function bail(err) { - reject(err || new Error('Aborted')); - } - function onError(err, num) { - if (err.bail) { - bail(err); - return; - } + BigNumber.clone = clone; - if (!op.retry(err)) { - reject(op.mainError()); - } else if (options.onRetry) { - options.onRetry(err, num); - } - } + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; - function runAttempt(num) { - var val; - try { - val = fn(bail, num); - } catch (err) { - onError(err, num); - return; - } + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. 
+ */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; - Promise.resolve(val) - .then(resolve) - .catch(function catchIt(err) { - onError(err, num); - }); - } + if (obj != null) { - op.attempt(runAttempt); - } + if (typeof obj == 'object') { - return new Promise(run); -} + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } -module.exports = retry; + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. + // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; -/***/ }), + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. 
+ if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } -/***/ 4812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + } else { -module.exports = -{ - parallel : __nccwpck_require__(8210), - serial : __nccwpck_require__(445), - serialOrdered : __nccwpck_require__(3578) -}; + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; -/***/ }), -/***/ 1700: -/***/ ((module) => { + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; -// API -module.exports = abort; + var i, n, + c = v.c, + e = v.e, + s = v.s; -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); + out: if ({}.toString.call(c) == '[object Array]') { - // reset leftover jobs - state.jobs = {}; -} + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; -/***/ }), + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { -/***/ 2794: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } -var defer = __nccwpck_require__(5295); + // Last element cannot be zero, unless it is the only element. + if (n !== 0) return true; + } + } -// API -module.exports = async; + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; - // check if async happened - defer(function() { isAsync = true; }); - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} + /* + * Return a new BigNumber whose value is the maximum of the arguments. 
+ * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; -/***/ }), + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; -/***/ 5295: -/***/ ((module) => { -module.exports = defer; + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); -/***/ }), + k = mathceil(dp / LOG_BASE); -/***/ 9023: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (CRYPTO) { -var async = __nccwpck_require__(2794) - , abort = __nccwpck_require__(1700) - ; + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { -// API -module.exports = iterate; + a = crypto.getRandomValues(new Uint32Array(k *= 2)); -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + for (; i < k;) { - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); - // clean up jobs - delete state.jobs[key]; + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 
1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; - // return salvaged results - callback(error, state.results); - }); -} + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; + // buffer + a = crypto.randomBytes(k *= 7); - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } + for (; i < k;) { - return aborter; -} + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { -/***/ }), + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } -/***/ 2474: -/***/ ((module) => { + // Use Math.random. + if (!CRYPTO) { -// API -module.exports = state; + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; + k = c[--i]; + dp %= LOG_BASE; - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } - return initState; -} + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + // Zero? + if (i < 0) { + c = [e = 0]; + } else { -/***/ }), + // Remove leading elements which are zero and adjust exponent accordingly. 
+ for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); -/***/ 7942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); -var abort = __nccwpck_require__(1700) - , async = __nccwpck_require__(2794) - ; + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } -// API -module.exports = terminator; + rand.e = e; + rand.c = c; + return rand; + }; + })(); -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - // fast forward iteration index - this.index = this.size; + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; - // abort jobs - abort(this); - // send back results we have so far - async(callback)(null, this.results); -} + // PRIVATE FUNCTIONS -/***/ }), + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; -/***/ 8210: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(2474) - , terminator = __nccwpck_require__(7942) - ; + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); -// Public API -module.exports = parallel; + arr[0] += alphabet.indexOf(str.charAt(i++)); -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); + for (j = 0; j < arr.length; j++) { - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; + return arr.reverse(); } - }); - - state.index++; - } - return terminator.bind(state, callback); -} + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; -/***/ }), + // Unlimited precision. 
+ POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; -/***/ 445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. -var serialOrdered = __nccwpck_require__(3578); + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } -// Public API -module.exports = serial; + // Convert the number as integer. -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; -/***/ }), + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); -/***/ 3578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Zero? + if (!xc[0]) return alphabet.charAt(0); -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(2474) - , terminator = __nccwpck_require__(7942) - ; + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); + // xc now represents str converted to baseOut. - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } + // THe index of the rounding digit. + d = e + dp + 1; - state.index++; + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } + // Look at the rounding digits and mode to determine whether to round up. - // done here - callback(null, state.results); - }); + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; - return terminator.bind(state, callback); -} + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 
8 : 7)); -/* - * -- Sort methods - */ + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; -} + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} + // Truncate xc to the required number of decimal places. + xc.length = d; + // Round up? + if (r) { -/***/ }), + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; -/***/ 1403: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } -var CombinedStream = __nccwpck_require__(5443); -var util = __nccwpck_require__(3837); -var path = __nccwpck_require__(1017); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var parseUrl = (__nccwpck_require__(7310).parse); -var fs = __nccwpck_require__(7147); -var Stream = (__nccwpck_require__(2781).Stream); -var mime = __nccwpck_require__(3583); -var asynckit = __nccwpck_require__(4812); -var populate = __nccwpck_require__(7027); + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); -// Public API -module.exports = FormData; + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); -// make it a Stream -util.inherits(FormData, CombinedStream); + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } -/** - * Create readable "multipart/form-data" streams. - * Can be used to submit forms - * and file uploads to other web applications. - * - * @constructor - * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream - */ -function FormData(options) { - if (!(this instanceof FormData)) { - return new FormData(options); - } + // The caller will add the sign. + return str; + }; + })(); - this._overheadLength = 0; - this._valueLength = 0; - this._valuesToMeasure = []; - CombinedStream.call(this); + // Perform division in the specified base. Called by div and convertBase. + div = (function () { - options = options || {}; - for (var option in options) { - this[option] = options[option]; - } -} + // Assume non-zero x and k. 
+ function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; -FormData.LINE_BREAK = '\r\n'; -FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } -FormData.prototype.append = function(field, value, options) { + if (carry) x = [carry].concat(x); - options = options || {}; + return x; + } - // allow filename as single option - if (typeof options == 'string') { - options = {filename: options}; - } + function compare(a, b, aL, bL) { + var i, cmp; - var append = CombinedStream.prototype.append.bind(this); + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { - // all that streamy business can't handle numbers - if (typeof value == 'number') { - value = '' + value; - } + for (i = cmp = 0; i < aL; i++) { - // https://github.com/felixge/node-form-data/issues/38 - if (util.isArray(value)) { - // Please convert your array into string - // the way web server expects it - this._error(new Error('Arrays are not supported.')); - return; - } + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } - var header = this._multiPartHeader(field, value, options); - var footer = this._multiPartFooter(); + return cmp; + } - append(header); - append(value); - append(footer); + function subtract(a, b, aL, base) { + var i = 0; - // pass along options.knownLength - this._trackLength(header, value, options); -}; + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } -FormData.prototype._trackLength = function(header, value, options) { - var valueLength = 0; + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } - // used w/ getLengthSync(), when length is known. - // e.g. for streaming directly from a remote server, - // w/ a known file a size, and not wanting to wait for - // incoming file to finish to get its size. - if (options.knownLength != null) { - valueLength += +options.knownLength; - } else if (Buffer.isBuffer(value)) { - valueLength = value.length; - } else if (typeof value === 'string') { - valueLength = Buffer.byteLength(value); - } + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; - this._valueLength += valueLength; + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { - // @check why add CRLF? does this account for custom/multiple CRLFs? - this._overheadLength += - Buffer.byteLength(header) + - FormData.LINE_BREAK.length; + return new BigNumber( - // empty or either doesn't have path or not an http response or not a stream - if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { - return; - } + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : - // no need to bother with the length - if (!options.knownLength) { - this._valuesToMeasure.push(value); - } -}; + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? 
s * 0 : s / 0 + ); + } -FormData.prototype._lengthRetriever = function(value, callback) { + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; - if (value.hasOwnProperty('fd')) { + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } - // take read range into a account - // `end` = Infinity –> read file till the end - // - // TODO: Looks like there is bug in Node fs.createReadStream - // it doesn't respect `end` options without `start` options - // Fix it when node fixes it. - // https://github.com/joyent/node/issues/7819 - if (value.end != undefined && value.end != Infinity && value.start != undefined) { + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. + for (i = 0; yc[i] == (xc[i] || 0); i++); - // when end specified - // no need to calculate range - // inclusive, starts with 0 - callback(null, value.end + 1 - (value.start ? value.start : 0)); + if (yc[i] > (xc[i] || 0)) e--; - // not that fast snoopy - } else { - // still need to fetch file size from fs - fs.stat(value.path, function(err, stat) { + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; - var fileSize; + // Normalise xc and yc so highest order digit of yc is >= base / 2. - if (err) { - callback(err); - return; - } + n = mathfloor(base / (yc[0] + 1)); - // update final size based on the range options - fileSize = stat.size - (value.start ? value.start : 0); - callback(null, fileSize); - }); - } + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } - // or http response - } else if (value.hasOwnProperty('httpVersion')) { - callback(null, +value.headers['content-length']); + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; - // or request stream http://github.com/mikeal/request - } else if (value.hasOwnProperty('httpModule')) { - // wait till response come back - value.on('response', function(response) { - value.pause(); - callback(null, +response.headers['content-length']); - }); - value.resume(); + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; - // something else - } else { - callback('Unknown stream'); - } -}; + do { + n = 0; -FormData.prototype._multiPartHeader = function(field, value, options) { - // custom header specified (as string)? - // it becomes responsible for boundary - // (e.g. to handle extra CRLFs on .NET servers) - if (typeof options.header == 'string') { - return options.header; - } + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); - var contentDisposition = this._getContentDisposition(value, options); - var contentType = this._getContentType(value, options); + // If divisor < remainder. + if (cmp < 0) { - var contents = ''; - var headers = { - // add custom disposition as third element or keep it two elements if not - 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), - // if no content type. 
allow it to be empty array - 'Content-Type': [].concat(contentType || []) - }; + // Calculate trial digit, n. - // allow custom headers. - if (typeof options.header == 'object') { - populate(headers, options.header); - } + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); - var header; - for (var prop in headers) { - if (!headers.hasOwnProperty(prop)) continue; - header = headers[prop]; + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); - // skip nullish headers. - if (header == null) { - continue; - } + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. - // convert all headers to arrays. - if (!Array.isArray(header)) { - header = [header]; - } + if (n > 1) { - // add non-empty headers. - if (header.length) { - contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; - } - } + // n may be > base only when base is 3. + if (n >= base) n = base - 1; - return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; -}; + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; -FormData.prototype._getContentDisposition = function(value, options) { + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; - var filename - , contentDisposition - ; + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { - if (typeof options.filepath === 'string') { - // custom filepath for relative paths - filename = path.normalize(options.filepath).replace(/\\/g, '/'); - } else if (options.filename || value.name || value.path) { - // custom filename take precedence - // formidable and the browser add a name property - // fs- and request- streams have path property - filename = path.basename(options.filename || value.name || value.path); - } else if (value.readable && value.hasOwnProperty('httpVersion')) { - // or try http response - filename = path.basename(value.client._httpMessage.path || ''); - } + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { - if (filename) { - contentDisposition = 'filename="' + filename + '"'; - } + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } - return contentDisposition; -}; + if (prodL < remL) prod = [0].concat(prod); -FormData.prototype._getContentType = function(value, options) { + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; - // use custom content-type above all - var contentType = options.contentType; + // If product was < remainder. 
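+                // The same trial-digit estimation in ordinary base 10 (a sketch): to compute
+                // 945 / 27, first normalise so the divisor's leading digit is >= base / 2
+                // (multiply both by 3: 2835 / 81). Remainder 283: n = mathfloor(28 / 8) = 3,
+                // product 3 * 81 = 243 <= 283, so the quotient digit is 3 and the new
+                // remainder is 283 - 243 = 40. Bring down the 5: n = mathfloor(40 / 8) = 5,
+                // 5 * 81 = 405, remainder 0, giving quotient 35. At the API level, assuming
+                // the default DECIMAL_PLACES of 20 and ROUNDING_MODE of 4:
+                //   new BigNumber(945).div(27).toString()  // '35'
+                //   new BigNumber(1).div(3).toString()     // '0.33333333333333333333'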
+ if (cmp == -1) { - // or try `name` from formidable, browser - if (!contentType && value.name) { - contentType = mime.lookup(value.name); - } + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; - // or try `path` from fs-, request- streams - if (!contentType && value.path) { - contentType = mime.lookup(value.path); - } + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 - // or if it's http-reponse - if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { - contentType = value.headers['content-type']; - } + // Add the next digit, n, to the result array. + qc[i++] = n; - // or guess it from the filepath or filename - if (!contentType && (options.filepath || options.filename)) { - contentType = mime.lookup(options.filepath || options.filename); - } + // Update the remainder. + if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); - // fallback to the default content type if `value` is not simple value - if (!contentType && typeof value == 'object') { - contentType = FormData.DEFAULT_CONTENT_TYPE; - } + more = rem[0] != null; - return contentType; -}; + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } -FormData.prototype._multiPartFooter = function() { - return function(next) { - var footer = FormData.LINE_BREAK; + if (base == BASE) { - var lastPart = (this._streams.length === 0); - if (lastPart) { - footer += this._lastBoundary(); - } + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); - next(footer); - }.bind(this); -}; + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); -FormData.prototype._lastBoundary = function() { - return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; -}; + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } -FormData.prototype.getHeaders = function(userHeaders) { - var header; - var formHeaders = { - 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() - }; + return q; + }; + })(); - for (header in userHeaders) { - if (userHeaders.hasOwnProperty(header)) { - formHeaders[header.toLowerCase()] = userHeaders[header]; - } - } - return formHeaders; -}; + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). 
+ */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; -FormData.prototype.setBoundary = function(boundary) { - this._boundary = boundary; -}; + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); -FormData.prototype.getBoundary = function() { - if (!this._boundary) { - this._generateBoundary(); - } + if (!n.c) return n.toString(); - return this._boundary; -}; + c0 = n.c[0]; + ne = n.e; -FormData.prototype.getBuffer = function() { - var dataBuffer = new Buffer.alloc( 0 ); - var boundary = this.getBoundary(); + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); - // Create the form content. Add Line breaks to the end of data. - for (var i = 0, len = this._streams.length; i < len; i++) { - if (typeof this._streams[i] !== 'function') { + // n.e may have changed if the value was rounded up. + e = n.e; - // Add content to the buffer. - if(Buffer.isBuffer(this._streams[i])) { - dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); - }else { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); - } + str = coeffToString(n.c); + len = str.length; - // Add break after content. - if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); - } - } - } + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. - // Add the footer and return the Buffer object. - return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); -}; + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { -FormData.prototype._generateBoundary = function() { - // This generates a 50 character boundary similar to those used by Firefox. - // They are optimized for boyer-moore parsing. - var boundary = '--------------------------'; - for (var i = 0; i < 24; i++) { - boundary += Math.floor(Math.random() * 10).toString(16); - } + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); - this._boundary = boundary; -}; + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); -// Note: getLengthSync DOESN'T calculate streams length -// As workaround one can calculate file size manually -// and add it as knownLength option -FormData.prototype.getLengthSync = function() { - var knownLength = this._overheadLength + this._valueLength; + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } - // Don't get confused, there are 3 "internal" streams for each keyval pair - // so it basically checks if there is any value added to the form - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } + return n.s < 0 && c0 ? '-' + str : str; + } - // https://github.com/form-data/form-data/issues/40 - if (!this.hasKnownLength()) { - // Some async length retrievers are present - // therefore synchronous length calculation is false. 
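+   // Example of the two notations produced here (a sketch, assuming the default
+   // ROUNDING_MODE of 4, i.e. round-half-up):
+   //   x = new BigNumber('45.6')
+   //   x.toExponential()    // '4.56e+1'
+   //   x.toExponential(1)   // '4.6e+1'
+   //   x.toPrecision(5)     // '45.600'  (zeros appended to give 5 significant digits)
+   //   x.toPrecision(1)     // '5e+1'    (sd < integer digits, so exponential notation)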
- // Please use getLength(callback) to get proper length - this._error(new Error('Cannot calculate proper length in synchronous way.')); - } - return knownLength; -}; + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); -// Public API to check if length of added values is known -// https://github.com/form-data/form-data/issues/196 -// https://github.com/form-data/form-data/issues/262 -FormData.prototype.hasKnownLength = function() { - var hasKnownLength = true; + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } - if (this._valuesToMeasure.length) { - hasKnownLength = false; - } + return x; + } - return hasKnownLength; -}; -FormData.prototype.getLength = function(cb) { - var knownLength = this._overheadLength + this._valueLength; + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } + // Remove trailing zeros. + for (; !c[--j]; c.pop()); - if (!this._valuesToMeasure.length) { - process.nextTick(cb.bind(this, null, knownLength)); - return; - } + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); - asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { - if (err) { - cb(err); - return; - } + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { - values.forEach(function(length) { - knownLength += length; - }); + // Infinity. + n.c = n.e = null; - cb(null, knownLength); - }); -}; + // Underflow? + } else if (e < MIN_EXP) { -FormData.prototype.submit = function(params, cb) { - var request - , options - , defaults = {method: 'post'} - ; + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } - // parse provided url if it's string - // or treat it as options object - if (typeof params == 'string') { + return n; + } - params = parseUrl(params); - options = populate({ - port: params.port, - path: params.pathname, - host: params.hostname, - protocol: params.protocol - }, defaults); - // use custom params - } else { + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; - options = populate(params, defaults); - // if no port provided use default one - if (!options.port) { - options.port = options.protocol == 'https:' ? 443 : 80; - } - } + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); - // put that good code in getHeaders to some use - options.headers = this.getHeaders(params.headers); + // No exception on ±Infinity or NaN. + if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { - // https if specified, fallback to http in any other case - if (options.protocol == 'https:') { - request = https.request(options); - } else { - request = http.request(options); - } + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 
2 : 8; + return !b || b == base ? p1 : m; + }); - // get content length and fire away - this.getLength(function(err, length) { - if (err && err !== 'Unknown stream') { - this._error(err); - return; - } + if (b) { + base = b; - // add content length - if (length) { - request.setHeader('Content-Length', length); - } + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } - this.pipe(request); - if (cb) { - var onResponse; + if (str != s) return new BigNumber(s, base); + } - var callback = function (error, responce) { - request.removeListener('error', callback); - request.removeListener('response', onResponse); + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } - return cb.call(this, error, responce); - }; + // NaN + x.s = null; + } - onResponse = callback.bind(this, null); + x.c = x.e = null; + } + })(); - request.on('error', callback); - request.on('response', onResponse); - } - }.bind(this)); - return request; -}; + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; -FormData.prototype._error = function(err) { - if (!this.error) { - this.error = err; - this.pause(); - this.emit('error', err); - } -}; + // if x is not Infinity or NaN... + if (xc) { -FormData.prototype.toString = function () { - return '[object FormData]'; -}; + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; -/***/ }), + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; -/***/ 7027: -/***/ ((module) => { + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); -// populates missing values -module.exports = function(dst, src) { + if (ni >= xc.length) { - Object.keys(src).forEach(function(prop) - { - dst[prop] = dst[prop] || src[prop]; - }); + if (r) { - return dst; -}; + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); -/***/ }), + // Get the index of rd within n. + i %= LOG_BASE; -/***/ 9417: -/***/ ((module) => { + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; -"use strict"; + // Get the rounding digit at index j of n. + rd = j < 0 ? 
0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); + r = r || sd < 0 || - var r = range(a, b, str); + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + if (sd < 1 || !xc[0]) { + xc.length = 0; - if (ai >= 0 && bi > 0) { - if(a===b) { - return [ai, bi]; - } - begs = []; - left = str.length; + if (r) { - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { - bi = str.indexOf(b, i + 1); - } + // Zero. + xc[0] = x.e = 0; + } - i = ai < bi && ai >= 0 ? ai : bi; - } + return x; + } - if (begs.length) { - result = [ left, right ]; - } - } + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; - return result; -} + // E.g. 56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + // Round up? + if (r) { -/***/ }), + for (; ;) { -/***/ 6463: -/***/ ((__unused_webpack_module, exports) => { + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { -"use strict"; + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } -exports.byteLength = byteLength -exports.toByteArray = toByteArray -exports.fromByteArray = fromByteArray + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } -var lookup = [] -var revLookup = [] -var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } -var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' -for (var i = 0, len = code.length; i < len; ++i) { - lookup[i] = code[i] - revLookup[code.charCodeAt(i)] = i -} + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; -// Support decoding URL-safe base64 strings, as Node.js does. 
-// See: https://en.wikipedia.org/wiki/Base64#URL_applications -revLookup['-'.charCodeAt(0)] = 62 -revLookup['_'.charCodeAt(0)] = 63 + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } -function getLens (b64) { - var len = b64.length + return x; + } - if (len % 4 > 0) { - throw new Error('Invalid string. Length must be a multiple of 4') - } - // Trim off extra bytes after placeholder bytes are found - // See: https://github.com/beatgammit/base64-js/issues/42 - var validLen = b64.indexOf('=') - if (validLen === -1) validLen = len + function valueOf(n) { + var str, + e = n.e; - var placeHoldersLen = validLen === len - ? 0 - : 4 - (validLen % 4) + if (e === null) return n.toString(); - return [validLen, placeHoldersLen] -} + str = coeffToString(n.c); -// base64 is 4/3 + up to two characters of the original data -function byteLength (b64) { - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); -function _byteLength (b64, validLen, placeHoldersLen) { - return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen -} + return n.s < 0 ? '-' + str : str; + } -function toByteArray (b64) { - var tmp - var lens = getLens(b64) - var validLen = lens[0] - var placeHoldersLen = lens[1] - var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + // PROTOTYPE/INSTANCE METHODS - var curByte = 0 - // if there are placeholders, only get up to the last complete 4 chars - var len = placeHoldersLen > 0 - ? validLen - 4 - : validLen + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; - var i - for (i = 0; i < len; i += 4) { - tmp = - (revLookup[b64.charCodeAt(i)] << 18) | - (revLookup[b64.charCodeAt(i + 1)] << 12) | - (revLookup[b64.charCodeAt(i + 2)] << 6) | - revLookup[b64.charCodeAt(i + 3)] - arr[curByte++] = (tmp >> 16) & 0xFF - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - if (placeHoldersLen === 2) { - tmp = - (revLookup[b64.charCodeAt(i)] << 2) | - (revLookup[b64.charCodeAt(i + 1)] >> 4) - arr[curByte++] = tmp & 0xFF - } + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; - if (placeHoldersLen === 1) { - tmp = - (revLookup[b64.charCodeAt(i)] << 10) | - (revLookup[b64.charCodeAt(i + 1)] << 4) | - (revLookup[b64.charCodeAt(i + 2)] >> 2) - arr[curByte++] = (tmp >> 8) & 0xFF - arr[curByte++] = tmp & 0xFF - } - return arr -} + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. 
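+   *
+   * E.g. (a sketch, assuming the default ROUNDING_MODE of 4):
+   *   new BigNumber('9.876').dp()              // 3
+   *   new BigNumber('9.876').dp(1).toString()  // '9.9'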
+ * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; -function tripletToBase64 (num) { - return lookup[num >> 18 & 0x3F] + - lookup[num >> 12 & 0x3F] + - lookup[num >> 6 & 0x3F] + - lookup[num & 0x3F] -} + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); -function encodeChunk (uint8, start, end) { - var tmp - var output = [] - for (var i = start; i < end; i += 3) { - tmp = - ((uint8[i] << 16) & 0xFF0000) + - ((uint8[i + 1] << 8) & 0xFF00) + - (uint8[i + 2] & 0xFF) - output.push(tripletToBase64(tmp)) - } - return output.join('') -} + return round(new BigNumber(x), dp + x.e + 1, rm); + } -function fromByteArray (uint8) { - var tmp - var len = uint8.length - var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes - var parts = [] - var maxChunkLength = 16383 // must be multiple of 3 + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; - // go through the array every three bytes, we'll deal with trailing stuff later - for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { - parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) - } + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; - // pad the end with zeros, but make sure to not forget the extra bytes - if (extraBytes === 1) { - tmp = uint8[len - 1] - parts.push( - lookup[tmp >> 2] + - lookup[(tmp << 4) & 0x3F] + - '==' - ) - } else if (extraBytes === 2) { - tmp = (uint8[len - 2] << 8) + uint8[len - 1] - parts.push( - lookup[tmp >> 10] + - lookup[(tmp >> 4) & 0x3F] + - lookup[(tmp << 2) & 0x3F] + - '=' - ) - } + return n; + }; - return parts.join('') -} + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; -/***/ }), -/***/ 7558: -/***/ (function(module) { + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; -;(function (globalObject) { - 'use strict'; -/* - * bignumber.js v9.1.2 - * A JavaScript library for arbitrary-precision arithmetic. - * https://github.com/MikeMcl/bignumber.js - * Copyright (c) 2022 Michael Mclaughlin - * MIT Licensed. 
- * - * BigNumber.prototype methods | BigNumber methods - * | - * absoluteValue abs | clone - * comparedTo | config set - * decimalPlaces dp | DECIMAL_PLACES - * dividedBy div | ROUNDING_MODE - * dividedToIntegerBy idiv | EXPONENTIAL_AT - * exponentiatedBy pow | RANGE - * integerValue | CRYPTO - * isEqualTo eq | MODULO_MODE - * isFinite | POW_PRECISION - * isGreaterThan gt | FORMAT - * isGreaterThanOrEqualTo gte | ALPHABET - * isInteger | isBigNumber - * isLessThan lt | maximum max - * isLessThanOrEqualTo lte | minimum min - * isNaN | random - * isNegative | sum - * isPositive | - * isZero | - * minus | - * modulo mod | - * multipliedBy times | - * negated | - * plus | - * precision sd | - * shiftedBy | - * squareRoot sqrt | - * toExponential | - * toFixed | - * toFormat | - * toFraction | - * toJSON | - * toNumber | - * toPrecision | - * toString | - * valueOf | - * - */ + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + n = new BigNumber(n); - var BigNumber, - isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, - mathceil = Math.ceil, - mathfloor = Math.floor, + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } - bignumberError = '[BigNumber Error] ', - tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + if (m != null) m = new BigNumber(m); - BASE = 1e14, - LOG_BASE = 14, - MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 - // MAX_INT32 = 0x7fffffff, // 2^31 - 1 - POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], - SQRT_BASE = 1e7, + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; - // EDITABLE - // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and - // the arguments to toExponential, toFixed, toFormat, and toPrecision. - MAX = 1E9; // 0 to MAX_INT32 + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } - /* - * Create and return a BigNumber constructor. - */ - function clone(configObject) { - var div, convertBase, parseNumeric, - P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, - ONE = new BigNumber(1), + nIsNeg = n.s < 0; + if (m) { - //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? 
!m.c[0] : !m.s) return new BigNumber(NaN); + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); - // The default values below must be integers within the inclusive ranges stated. - // The values can also be changed at run-time using BigNumber.set. + if (isModExp) x = x.mod(m); - // The maximum number of decimal places for operations involving division. - DECIMAL_PLACES = 20, // 0 to MAX + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { - // The rounding mode used when rounding to the above decimal places, and when using - // toExponential, toFixed, toFormat and toPrecision, and round (default value). - // UP 0 Away from zero. - // DOWN 1 Towards zero. - // CEIL 2 Towards +Infinity. - // FLOOR 3 Towards -Infinity. - // HALF_UP 4 Towards nearest neighbour. If equidistant, up. - // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. - // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. - // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. - // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. - ROUNDING_MODE = 4, // 0 to 8 + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; - // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; - // The exponent value at and beneath which toString returns exponential notation. - // Number type: -7 - TO_EXP_NEG = -7, // 0 to -MAX + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); - // The exponent value at and above which toString returns exponential notation. - // Number type: 21 - TO_EXP_POS = 21, // 0 to MAX + } else if (POW_PRECISION) { - // RANGE : [MIN_EXP, MAX_EXP] + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } - // The minimum exponent value, beneath which underflow to zero occurs. - // Number type: -324 (5e-324) - MIN_EXP = -1e7, // -1 to -MAX + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } - // The maximum exponent value, above which overflow to Infinity occurs. - // Number type: 308 (1.7976931348623157e+308) - // For MAX_EXP > 1e7, e.g. new BigNumber('1e100000000').plus(1) may be slow. - MAX_EXP = 1e7, // 1 to MAX + y = new BigNumber(ONE); - // Whether to use cryptographically-secure random number generation, if available. - CRYPTO = false, // true or false + // Performs 54 loop iterations for n of 9007199254740991. + for (; ;) { - // The modulo mode used when calculating the modulus: a mod n. - // The quotient (q = a / n) is calculated according to the corresponding rounding mode. - // The remainder (r) is calculated as: r = a - n * q. - // - // UP 0 The remainder is positive if the dividend is negative, else is negative. - // DOWN 1 The remainder has the same sign as the dividend. - // This modulo mode is commonly known as 'truncated division' and is - // equivalent to (a % n) in JavaScript. 
- // FLOOR 3 The remainder has the same sign as the divisor (Python %). - // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. - // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). - // The remainder is always positive. - // - // The truncated division, floored division, Euclidian division and IEEE 754 remainder - // modes are commonly used for the modulus operation. - // Although the other rounding modes can also be used, they may not give useful results. - MODULO_MODE = 1, // 0 to 9 + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; - // The maximum number of significant digits of the result of the exponentiatedBy operation. - // If POW_PRECISION is 0, there will be unlimited significant digits. - POW_PRECISION = 0, // 0 to MAX + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } - // The format specification used by the BigNumber.prototype.toFormat method. - FORMAT = { - prefix: '', - groupSize: 3, - secondaryGroupSize: 0, - groupSeparator: ',', - decimalSeparator: '.', - fractionGroupSize: 0, - fractionGroupSeparator: '\xA0', // non-breaking space - suffix: '' - }, + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); - // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', - // '-', '.', whitespace, or repeated character. - // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' - ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', - alphabetHasNormalDecimalDigits = true; + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + x = x.times(x); - //------------------------------------------------------------------------------------------ + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); - // CONSTRUCTOR + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; /* - * The BigNumber constructor and exported function. - * Create and return a new instance of a BigNumber object. + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. * - * v {number|string|BigNumber} A numeric value. - * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' */ - function BigNumber(v, b) { - var alphabet, c, caseChanged, e, i, isNum, len, str, - x = this; - - // Enable constructor call without `new`. - if (!(x instanceof BigNumber)) return new BigNumber(v, b); - - if (b == null) { + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; - if (v && v._isBigNumber === true) { - x.s = v.s; - if (!v.c || v.e > MAX_EXP) { - x.c = x.e = null; - } else if (v.e < MIN_EXP) { - x.c = [x.e = 0]; - } else { - x.e = v.e; - x.c = v.c.slice(); - } + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. 
+ */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; - return; - } - if ((isNum = typeof v == 'number') && v * 0 == 0) { + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; - // Use `1 / n` to handle minus zero also. - x.s = 1 / v < 0 ? (v = -v, -1) : 1; - // Fast path for integers, where n < 2147483648 (2**31). - if (v === ~~v) { - for (e = 0, i = v; i >= 10; i /= 10, e++); + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; - if (e > MAX_EXP) { - x.c = x.e = null; - } else { - x.e = e; - x.c = [v]; - } - return; - } + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; - str = String(v); - } else { + }; - if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); - x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; - } + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; - // Decimal point? - if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); - // Exponential form? - if ((i = str.search(/e/i)) > 0) { + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; - // Determine exponent. - if (e < 0) e = i; - e += +str.slice(i + 1); - str = str.substring(0, i); - } else if (e < 0) { - // Integer. - e = str.length; - } + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; - } else { - // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' - intCheck(b, 2, ALPHABET.length, 'Base'); + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; - // Allow exponential notation to be used with base 10 argument, while - // also rounding to DECIMAL_PLACES as with other bases. - if (b == 10 && alphabetHasNormalDecimalDigits) { - x = new BigNumber(v); - return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); - } - str = String(v); + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; - if (isNum = typeof v == 'number') { - // Avoid potential interpretation of Infinity and NaN as base 44+ values. - if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; - x.s = 1 / v < 0 ? 
(str = str.slice(1), -1) : 1; - // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' - if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { - throw Error - (tooManyDigits + v); - } - } else { - x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; - } + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. + */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; - alphabet = ALPHABET.slice(0, b); - e = i = 0; - // Check that str is a valid base b number. - // Don't use RegExp, so alphabet can contain special characters. - for (len = str.length; i < len; i++) { - if (alphabet.indexOf(c = str.charAt(i)) < 0) { - if (c == '.') { + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; - // If '.' is not the first character and it has not be found before. - if (i > e) { - e = len; - continue; - } - } else if (!caseChanged) { + y = new BigNumber(y, b); + b = y.s; - // Allow e.g. hexadecimal 'FF' as well as 'ff'. - if (str == str.toUpperCase() && (str = str.toLowerCase()) || - str == str.toLowerCase() && (str = str.toUpperCase())) { - caseChanged = true; - i = -1; - e = 0; - continue; - } - } + // Either NaN? + if (!a || !b) return new BigNumber(NaN); - return parseNumeric(x, String(v), isNum, b); - } - } + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } - // Prevent later check for length on converted number. - isNum = false; - str = convertBase(str, b, 10, x.s); + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; - // Decimal point? - if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); - else e = str.length; - } + if (!xe || !ye) { - // Determine leading zeros. - for (i = 0; str.charCodeAt(i) === 48; i++); + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); - // Determine trailing zeros. - for (len = str.length; str.charCodeAt(--len) === 48;); + // Either zero? + if (!xc[0] || !yc[0]) { - if (str = str.slice(i, ++len)) { - len -= i; + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : - // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' - if (isNum && BigNumber.DEBUG && - len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { - throw Error - (tooManyDigits + (x.s * v)); + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); } + } - // Overflow? - if ((e = e - i - 1) > MAX_EXP) { - - // Infinity. - x.c = x.e = null; + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); - // Underflow? - } else if (e < MIN_EXP) { + // Determine which is the bigger number. + if (a = xe - ye) { - // Zero. - x.c = [x.e = 0]; + if (xLTy = a < 0) { + a = -a; + t = xc; } else { - x.e = e; - x.c = []; + ye = xe; + t = yc; + } - // Transform base + t.reverse(); - // e is the base 10 exponent. - // i is where to slice str to get the first element of the coefficient array. - i = (e + 1) % LOG_BASE; - if (e < 0) i += LOG_BASE; // i < 1 + // Prepend zeros to equalise exponents. 
+ for (b = a; b--; t.push(0)); + t.reverse(); + } else { - if (i < len) { - if (i) x.c.push(+str.slice(0, i)); + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; - for (len -= LOG_BASE; i < len;) { - x.c.push(+str.slice(i, i += LOG_BASE)); - } + for (a = b = 0; b < j; b++) { - i = LOG_BASE - (str = str.slice(i)).length; - } else { - i -= len; + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; } - - for (; i--; str += '0'); - x.c.push(+str); } - } else { - - // Zero. - x.c = [x.e = 0]; } - } - - - // CONSTRUCTOR PROPERTIES + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } - BigNumber.clone = clone; - - BigNumber.ROUND_UP = 0; - BigNumber.ROUND_DOWN = 1; - BigNumber.ROUND_CEIL = 2; - BigNumber.ROUND_FLOOR = 3; - BigNumber.ROUND_HALF_UP = 4; - BigNumber.ROUND_HALF_DOWN = 5; - BigNumber.ROUND_HALF_EVEN = 6; - BigNumber.ROUND_HALF_CEIL = 7; - BigNumber.ROUND_HALF_FLOOR = 8; - BigNumber.EUCLID = 9; - + b = (j = yc.length) - (i = xc.length); - /* - * Configure infrequently-changing library-wide settings. - * - * Accept an object with the following optional properties (if the value of a property is - * a number, it must be an integer within the inclusive range stated): - * - * DECIMAL_PLACES {number} 0 to MAX - * ROUNDING_MODE {number} 0 to 8 - * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] - * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] - * CRYPTO {boolean} true or false - * MODULO_MODE {number} 0 to 9 - * POW_PRECISION {number} 0 to MAX - * ALPHABET {string} A string of two or more unique characters which does - * not contain '.'. - * FORMAT {object} An object with some of the following properties: - * prefix {string} - * groupSize {number} - * secondaryGroupSize {number} - * groupSeparator {string} - * decimalSeparator {string} - * fractionGroupSize {number} - * fractionGroupSeparator {string} - * suffix {string} - * - * (The values assigned to the above FORMAT object properties are not checked for validity.) - * - * E.g. - * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) - * - * Ignore properties/parameters set to null or undefined, except for ALPHABET. - * - * Return an object with the properties current values. - */ - BigNumber.config = BigNumber.set = function (obj) { - var p, v; + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; - if (obj != null) { + // Subtract yc from xc. + for (; j > a;) { - if (typeof obj == 'object') { + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } - // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. - // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { - v = obj[p]; - intCheck(v, 0, MAX, p); - DECIMAL_PLACES = v; - } + xc[j] -= yc[j]; + } - // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. - // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { - v = obj[p]; - intCheck(v, 0, 8, p); - ROUNDING_MODE = v; - } + // Remove leading zeros and adjust exponent accordingly. 
+ for (; xc[0] == 0; xc.splice(0, 1), --ye); - // EXPONENTIAL_AT {number|number[]} - // Integer, -MAX to MAX inclusive or - // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. - // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { - v = obj[p]; - if (v && v.pop) { - intCheck(v[0], -MAX, 0, p); - intCheck(v[1], 0, MAX, p); - TO_EXP_NEG = v[0]; - TO_EXP_POS = v[1]; - } else { - intCheck(v, -MAX, MAX, p); - TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); - } - } + // Zero? + if (!xc[0]) { - // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or - // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. - // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' - if (obj.hasOwnProperty(p = 'RANGE')) { - v = obj[p]; - if (v && v.pop) { - intCheck(v[0], -MAX, -1, p); - intCheck(v[1], 1, MAX, p); - MIN_EXP = v[0]; - MAX_EXP = v[1]; - } else { - intCheck(v, -MAX, MAX, p); - if (v) { - MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); - } else { - throw Error - (bignumberError + p + ' cannot be zero: ' + v); - } - } - } + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } - // CRYPTO {boolean} true or false. - // '[BigNumber Error] CRYPTO not true or false: {v}' - // '[BigNumber Error] crypto unavailable' - if (obj.hasOwnProperty(p = 'CRYPTO')) { - v = obj[p]; - if (v === !!v) { - if (v) { - if (typeof crypto != 'undefined' && crypto && - (crypto.getRandomValues || crypto.randomBytes)) { - CRYPTO = v; - } else { - CRYPTO = !v; - throw Error - (bignumberError + 'crypto unavailable'); - } - } else { - CRYPTO = v; - } - } else { - throw Error - (bignumberError + p + ' not true or false: ' + v); - } - } + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; - // MODULO_MODE {number} Integer, 0 to 9 inclusive. - // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'MODULO_MODE')) { - v = obj[p]; - intCheck(v, 0, 9, p); - MODULO_MODE = v; - } - // POW_PRECISION {number} Integer, 0 to MAX inclusive. - // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'POW_PRECISION')) { - v = obj[p]; - intCheck(v, 0, MAX, p); - POW_PRECISION = v; - } + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; - // FORMAT {object} - // '[BigNumber Error] FORMAT not an object: {v}' - if (obj.hasOwnProperty(p = 'FORMAT')) { - v = obj[p]; - if (typeof v == 'object') FORMAT = v; - else throw Error - (bignumberError + p + ' not an object: ' + v); - } + y = new BigNumber(y, b); - // ALPHABET {string} - // '[BigNumber Error] ALPHABET invalid: {v}' - if (obj.hasOwnProperty(p = 'ALPHABET')) { - v = obj[p]; + // Return NaN if x is Infinity or NaN, or y is NaN or zero. 
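+      // For example (a sketch, assuming the default MODULO_MODE of 1, i.e. truncated
+      // division, which matches JavaScript's % operator):
+      //   new BigNumber(-5).mod(3).toString()  // '-2'  (sign follows the dividend)
+      // whereas with BigNumber.config({ MODULO_MODE: 9 }) (Euclidean division) the
+      // remainder is always non-negative:
+      //   new BigNumber(-5).mod(3).toString()  // '1'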
+ if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); - // Disallow if less than two characters, - // or if it contains '+', '-', '.', whitespace, or a repeated character. - if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { - alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; - ALPHABET = v; - } else { - throw Error - (bignumberError + p + ' invalid: ' + v); - } - } + // Return x if y is Infinity or x is zero. + } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } - } else { + if (MODULO_MODE == 9) { - // '[BigNumber Error] Object expected: {v}' - throw Error - (bignumberError + 'Object expected: ' + obj); - } + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); } - return { - DECIMAL_PLACES: DECIMAL_PLACES, - ROUNDING_MODE: ROUNDING_MODE, - EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], - RANGE: [MIN_EXP, MAX_EXP], - CRYPTO: CRYPTO, - MODULO_MODE: MODULO_MODE, - POW_PRECISION: POW_PRECISION, - FORMAT: FORMAT, - ALPHABET: ALPHABET - }; + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; }; /* - * Return true if v is a BigNumber instance, otherwise return false. - * - * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. - * - * v {any} + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I * - * '[BigNumber Error] Invalid BigNumber: {v}' + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). */ - BigNumber.isBigNumber = function (v) { - if (!v || v._isBigNumber !== true) return false; - if (!BigNumber.DEBUG) return true; + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; - var i, n, - c = v.c, - e = v.e, - s = v.s; + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { - out: if ({}.toString.call(c) == '[object Array]') { + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; - if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; - // If the first element is zero, the BigNumber value must be zero. - if (c[0] === 0) { - if (e === 0 && c.length === 1) return true; - break out; + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; } + } - // Calculate number of digits that c[0] should have, based on the exponent. - i = (e + 1) % LOG_BASE; - if (i < 1) i += LOG_BASE; + return y; + } - // Calculate number of digits of c[0]. - //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { - if (String(c[0]).length == i) { + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; - for (i = 0; i < c.length; i++) { - n = c[i]; - if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; - } + // Ensure xc points to longer array and xcL to its length. 
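+      // Note on the limb arithmetic below: the coefficients are arrays of base-1e14
+      // digits, and each limb is split into two 7-digit halves via SQRT_BASE = 1e7 so
+      // that every partial product stays well below 2^53 and remains exact in a double.
+      // Because the limbs are decimal, the usual binary floating-point surprises do not
+      // appear at the API level, e.g. (a sketch):
+      //   new BigNumber('0.1').plus('0.2').toString()   // '0.3'
+      //   new BigNumber('0.3').minus('0.1').toString()  // '0.2'
+      //   new BigNumber('0.1').times('0.2').toString()  // '0.02'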
+ if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } - // Last element cannot be zero, unless it is the only element. - if (n !== 0) return true; - } + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; } - // Infinity/NaN - } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { - return true; + zc[j] = c; } - throw Error - (bignumberError + 'Invalid BigNumber: ' + v); - }; - + if (c) { + ++e; + } else { + zc.splice(0, 1); + } - /* - * Return a new BigNumber whose value is the maximum of the arguments. - * - * arguments {number|string|BigNumber} - */ - BigNumber.maximum = BigNumber.max = function () { - return maxOrMin(arguments, -1); + return normalise(y, zc, e); }; /* - * Return a new BigNumber whose value is the minimum of the arguments. - * - * arguments {number|string|BigNumber} + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. */ - BigNumber.minimum = BigNumber.min = function () { - return maxOrMin(arguments, 1); + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; }; /* - * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, - * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing - * zeros are produced). - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' - * '[BigNumber Error] crypto unavailable' + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). */ - BigNumber.random = (function () { - var pow2_53 = 0x20000000000000; - - // Return a 53 bit integer n, where 0 <= n < 9007199254740992. - // Check if Math.random() produces more than 32 bits of randomness. - // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. - // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. - var random53bitInt = (Math.random() * pow2_53) & 0x1fffff - ? function () { return mathfloor(Math.random() * pow2_53); } - : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + - (Math.random() * 0x800000 | 0); }; - - return function (dp) { - var a, b, e, k, v, - i = 0, - c = [], - rand = new BigNumber(ONE); + P.plus = function (y, b) { + var t, + x = this, + a = x.s; - if (dp == null) dp = DECIMAL_PLACES; - else intCheck(dp, 0, MAX); + y = new BigNumber(y, b); + b = y.s; - k = mathceil(dp / LOG_BASE); + // Either NaN? + if (!a || !b) return new BigNumber(NaN); - if (CRYPTO) { + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } - // Browsers supporting crypto.getRandomValues. 
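The modulo implementation above branches on MODULO_MODE: the default mode 1 behaves like JavaScript's % operator (the remainder takes the sign of the dividend), while mode 9 performs Euclidean division, q = sign(y) * floor(x / abs(y)), so the remainder is always non-negative. An illustrative sketch of the bundled bignumber.js API (not part of this diff; it assumes the package is required by name and uses arbitrary values):

    // Sketch only: exercises mod() and times() as implemented above.
    const BigNumber = require('bignumber.js'); // assumption: same package resolved by name

    new BigNumber(-5).mod(3).toString();            // '-2'  (default MODULO_MODE 1, sign follows the dividend)
    BigNumber.set({ MODULO_MODE: 9 });              // Euclidean mode
    new BigNumber(-5).mod(3).toString();            // '1'   (r = x - q*y with q = sign(y) * floor(x / abs(y)))
    new BigNumber('1e15').times('1e15').toString(); // '1e+30' (times() multiplies the chunked coefficient arrays)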
- if (crypto.getRandomValues) { + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; - a = crypto.getRandomValues(new Uint32Array(k *= 2)); + if (!xe || !ye) { - for (; i < k;) { + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); - // 53 bits: - // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) - // 11111 11111111 11111111 11111111 11100000 00000000 00000000 - // ((Math.pow(2, 32) - 1) >>> 11).toString(2) - // 11111 11111111 11111111 - // 0x20000 is 2^21. - v = a[i] * 0x20000 + (a[i + 1] >>> 11); + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0); + } - // Rejection sampling: - // 0 <= v < 9007199254740992 - // Probability that v >= 9e15, is - // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 - if (v >= 9e15) { - b = crypto.getRandomValues(new Uint32Array(2)); - a[i] = b[0]; - a[i + 1] = b[1]; - } else { + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); - // 0 <= v <= 8999999999999999 - // 0 <= (v % 1e14) <= 99999999999999 - c.push(v % 1e14); - i += 2; - } - } - i = k / 2; + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } - // Node.js supporting crypto.randomBytes. - } else if (crypto.randomBytes) { + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } - // buffer - a = crypto.randomBytes(k *= 7); + a = xc.length; + b = yc.length; - for (; i < k;) { + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } - // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 - // 0x100000000 is 2^32, 0x1000000 is 2^24 - // 11111 11111111 11111111 11111111 11111111 11111111 11111111 - // 0 <= v < 9007199254740992 - v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + - (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + - (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } - if (v >= 9e15) { - crypto.randomBytes(7).copy(a, i); - } else { + if (a) { + xc = [a].concat(xc); + ++ye; + } - // 0 <= (v % 1e14) <= 99999999999999 - c.push(v % 1e14); - i += 7; - } - } - i = k / 7; - } else { - CRYPTO = false; - throw Error - (bignumberError + 'crypto unavailable'); - } - } + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; - // Use Math.random. - if (!CRYPTO) { - for (; i < k;) { - v = random53bitInt(); - if (v < 9e15) c[i++] = v % 1e14; - } - } + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. 
+ * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; - k = c[--i]; - dp %= LOG_BASE; + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); - // Convert trailing digits to zeros according to dp. - if (k && dp) { - v = POWS_TEN[LOG_BASE - dp]; - c[i] = mathfloor(k / v) * v; - } + return round(new BigNumber(x), sd, rm); + } - // Remove trailing elements which are zero. - for (; c[i] === 0; c.pop(), i--); + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; - // Zero? - if (i < 0) { - c = [e = 0]; - } else { + if (v = c[v]) { - // Remove leading elements which are zero and adjust exponent accordingly. - for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); - // Count the digits of the first element of c to determine leading zeros, and... - for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } - // adjust the exponent accordingly. - if (i < LOG_BASE) e -= LOG_BASE - i; - } + if (sd && x.e + 1 > n) n = x.e + 1; - rand.e = e; - rand.c = c; - return rand; - }; - })(); + return n; + }; /* - * Return a BigNumber whose value is the sum of the arguments. + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. * - * arguments {number|string|BigNumber} + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' */ - BigNumber.sum = function () { - var i = 1, - args = arguments, - sum = new BigNumber(args[0]); - for (; i < args.length;) sum = sum.plus(args[i++]); - return sum; + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); }; - // PRIVATE FUNCTIONS - - - // Called by BigNumber and BigNumber.prototype.toString. - convertBase = (function () { - var decimal = '0123456789'; - - /* - * Convert string of baseIn to an array of numbers of baseOut. - * Eg. toBaseOut('255', 10, 16) returns [15, 15]. - * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. - */ - function toBaseOut(str, baseIn, baseOut, alphabet) { - var j, - arr = [0], - arrL, - i = 0, - len = str.length; + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); - for (; i < len;) { - for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } - arr[0] += alphabet.indexOf(str.charAt(i++)); + // Initial estimate. + s = Math.sqrt(+valueOf(x)); - for (j = 0; j < arr.length; j++) { + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); - if (arr[j] > baseOut - 1) { - if (arr[j + 1] == null) arr[j + 1] = 0; - arr[j + 1] += arr[j] / baseOut | 0; - arr[j] %= baseOut; - } - } + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; } - return arr.reverse(); + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); } - // Convert a numeric string of baseIn to a numeric string of baseOut. - // If the caller is toString, we are converting from base 10 to baseOut. - // If the caller is BigNumber, we are converting from baseIn to base 10. - return function (str, baseIn, baseOut, sign, callerIsToString) { - var alphabet, d, e, k, r, x, xc, y, - i = str.indexOf('.'), - dp = DECIMAL_PLACES, - rm = ROUNDING_MODE; - - // Non-integer. - if (i >= 0) { - k = POW_PRECISION; + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; - // Unlimited precision. - POW_PRECISION = 0; - str = str.replace('.', ''); - y = new BigNumber(baseIn); - x = y.pow(str.length - i); - POW_PRECISION = k; + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); - // Convert str as if an integer, then restore the fraction part by dividing the - // result by its base raised to a power. + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { - y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), - 10, baseOut, decimal); - y.e = y.c.length; - } + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); - // Convert the number as integer. + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { - xc = toBaseOut(str, baseIn, baseOut, callerIsToString - ? (alphabet = ALPHABET, decimal) - : (alphabet = decimal, ALPHABET)); + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); - // xc now represents str as an integer and converted to baseOut. e is the exponent. - e = k = xc.length; + if (t.times(t).eq(x)) { + r = t; + break; + } + } - // Remove trailing zeros. - for (; xc[--k] == 0; xc.pop()); + dp += 4; + s += 4; + rep = 1; + } else { - // Zero? - if (!xc[0]) return alphabet.charAt(0); + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { - // Does str represent an integer? If so, no need for the division. - if (i < 0) { - --e; - } else { - x.c = xc; - x.e = e; + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } - // The sign is needed for correct rounding. - x.s = sign; - x = div(x, y, dp, rm, baseOut); - xc = x.c; - r = x.r; - e = x.e; + break; + } + } } + } - // xc now represents str converted to baseOut. 
+ return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; - // THe index of the rounding digit. - d = e + dp + 1; - // The rounding digit: the digit to the right of the digit that may be rounded up. - i = xc[d]; + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; - // Look at the rounding digits and mode to determine whether to round up. - k = baseOut / 2; - r = r || d < 0 || xc[d + 1] != null; + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; - r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) - : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || - rm == (x.s < 0 ? 8 : 7)); - // If the index of the rounding digit is not greater than zero, or xc represents - // zero, then the result of the base conversion is zero or, if rounding up, a value - // such as 0.00001. - if (d < 1 || !xc[0]) { + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. + * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; - // 1^-dp or 0 - str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } - // Truncate xc to the required number of decimal places. - xc.length = d; - - // Round up? - if (r) { - - // Rounding up may mean the previous digit has to be rounded up and so on. 
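squareRoot() above starts from a Math.sqrt estimate and refines it by Newton-Raphson iteration (r = 0.5 * (r + x/r)), rounding the result to DECIMAL_PLACES with ROUNDING_MODE; toExponential() and toFixed() both delegate to the shared format() helper. An illustrative sketch (not part of this diff), assuming the library default half-up rounding:

    // Sketch only: sqrt precision is controlled by the global DECIMAL_PLACES setting.
    const BigNumber = require('bignumber.js'); // assumption: same package resolved by name

    BigNumber.set({ DECIMAL_PLACES: 10 });
    new BigNumber(2).sqrt().toString();        // '1.4142135624' (10 decimal places)
    new BigNumber('123.456').toFixed(1);       // '123.5'
    new BigNumber('123.456').toExponential(2); // '1.23e+2'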
- for (--baseOut; ++xc[--d] > baseOut;) { - xc[d] = 0; - - if (!d) { - ++e; - xc = [1].concat(xc); - } - } - } - - // Determine trailing zeros. - for (k = xc.length; !xc[--k];); + str = x.toFixed(dp, rm); - // E.g. [4, 11, 15] becomes 4bf. - for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; - // Add leading zeros, decimal point and trailing zeros as required. - str = toFixedPoint(str, e, alphabet.charAt(0)); + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; } - // The caller will add the sign. - return str; - }; - })(); + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } - // Perform division in the specified base. Called by div and convertBase. - div = (function () { + return (format.prefix || '') + str + (format.suffix || ''); + }; - // Assume non-zero x and k. - function multiply(x, k, base) { - var m, temp, xlo, xhi, - carry = 0, - i = x.length, - klo = k % SQRT_BASE, - khi = k / SQRT_BASE | 0; - for (x = x.slice(); i--;) { - xlo = x[i] % SQRT_BASE; - xhi = x[i] / SQRT_BASE | 0; - m = khi * xlo + xhi * klo; - temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; - carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; - x[i] = temp % base; - } + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; - if (carry) x = [carry].concat(x); + if (md != null) { + n = new BigNumber(md); - return x; + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } } - function compare(a, b, aL, bL) { - var i, cmp; - - if (aL != bL) { - cmp = aL > bL ? 1 : -1; - } else { - - for (i = cmp = 0; i < aL; i++) { + if (!xc) return new BigNumber(x); - if (a[i] != b[i]) { - cmp = a[i] > b[i] ? 1 : -1; - break; - } - } - } + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); - return cmp; - } + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. 
+ e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? d : n1) : n; - function subtract(a, b, aL, base) { - var i = 0; + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); - // Subtract b from a. - for (; aL--;) { - a[aL] -= i; - i = a[aL] < b[aL] ? 1 : 0; - a[aL] = i * base + a[aL] - b[aL]; - } + // n0 = d1 = 0 + n0.c[0] = 0; - // Remove leading zeros. - for (; !a[0] && a.length > 1; a.splice(0, 1)); + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; } - // x: dividend, y: divisor. - return function (x, y, dp, rm, base) { - var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, - yL, yz, - s = x.s == y.s ? 1 : -1, - xc = x.c, - yc = y.c; - - // Either NaN, Infinity or 0? - if (!xc || !xc[0] || !yc || !yc[0]) { + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; - return new BigNumber( + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; - // Return NaN if either NaN, or both Infinity or 0. - !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + MAX_EXP = exp; - // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. - xc && xc[0] == 0 || !yc ? s * 0 : s / 0 - ); - } + return r; + }; - q = new BigNumber(s); - qc = q.c = []; - e = x.e - y.e; - s = dp + e + 1; - if (!base) { - base = BASE; - e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); - s = s / LOG_BASE | 0; - } + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; - // Result exponent may be one less then the current value of e. - // The coefficients of the BigNumbers from convertBase may have trailing zeros. - for (i = 0; yc[i] == (xc[i] || 0); i++); - if (yc[i] > (xc[i] || 0)) e--; + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; - if (s < 0) { - qc.push(1); - more = true; - } else { - xL = xc.length; - yL = yc.length; - i = 0; - s += 2; - // Normalise xc and yc so highest order digit of yc is >= base / 2. + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. 
+ * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; - n = mathfloor(base / (yc[0] + 1)); + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } - // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. - // if (n > 1 || n++ == 1 && yc[0] < base / 2) { - if (n > 1) { - yc = multiply(yc, n, base); - xc = multiply(xc, n, base); - yL = yc.length; - xL = xc.length; - } + if (s < 0 && n.c[0]) str = '-' + str; + } - xi = yL; - rem = xc.slice(0, yL); - remL = rem.length; + return str; + }; - // Add zeros to make remainder as long as divisor. - for (; remL < yL; rem[remL++] = 0); - yz = yc.slice(); - yz = [0].concat(yz); - yc0 = yc[0]; - if (yc[1] >= base / 2) yc0++; - // Not necessary, but to prevent trial digit n > base, when using base 3. - // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; - do { - n = 0; + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; - // Compare divisor and remainder. - cmp = compare(yc, rem, yL, remL); - // If divisor < remainder. - if (cmp < 0) { + P._isBigNumber = true; - // Calculate trial digit, n. + if (configObject != null) BigNumber.set(configObject); - rem0 = rem[0]; - if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + return BigNumber; + } - // n is how many times the divisor goes into the current remainder. - n = mathfloor(rem0 / yc0); - // Algorithm: - // product = divisor multiplied by trial digit (n). - // Compare product and remainder. - // If product is greater than remainder: - // Subtract divisor from product, decrement trial digit. - // Subtract product from remainder. - // If product was less than remainder at the last compare: - // Compare new remainder and divisor. - // If remainder is greater than divisor: - // Subtract divisor from remainder, increment trial digit. + // PRIVATE HELPER FUNCTIONS - if (n > 1) { + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. - // n may be > base only when base is 3. - if (n >= base) n = base - 1; - // product = divisor * trial digit. - prod = multiply(yc, n, base); - prodL = prod.length; - remL = rem.length; + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } - // Compare product and remainder. - // If product > remainder then trial digit n too high. - // n is 1 too high about 5% of the time, and is not known to have - // ever been more than 1 too high. - while (compare(prod, rem, prodL, remL) == 1) { - n--; - // Subtract divisor from product. - subtract(prod, yL < prodL ? yz : yc, prodL, base); - prodL = prod.length; - cmp = 1; - } - } else { + // Return a coefficient array as a string of base 10 digits. 
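By this point toFormat(), toFraction(), toPrecision() and toString(b) are all defined: toFormat() applies the FORMAT object (group sizes and separators), toFraction() searches for the closest fraction subject to an optional maximum denominator, and toString(b) goes through convertBase for any base other than 10. An illustrative sketch (not part of this diff, arbitrary values):

    // Sketch only: string conversions provided by the prototype methods above.
    const BigNumber = require('bignumber.js'); // assumption: same package resolved by name

    new BigNumber('1234567.891').toFormat(2); // '1,234,567.89' (default FORMAT: groupSize 3, ',' separator)
    new BigNumber(0.75).toFraction();         // [BigNumber(3), BigNumber(4)], i.e. 3/4
    new BigNumber(255).toString(16);          // 'ff'
    new BigNumber('123.456').toPrecision(4);  // '123.5'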
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; - // n is 0 or 1, cmp is -1. - // If n is 0, there is no need to compare yc and rem again below, - // so change cmp to 1 to avoid it. - // If n is 1, leave cmp as -1, so yc and rem are compared again. - if (n == 0) { + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } - // divisor < remainder, so n must be at least 1. - cmp = n = 1; - } + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); - // product = divisor - prod = yc.slice(); - prodL = prod.length; - } + return r.slice(0, j + 1 || 1); + } - if (prodL < remL) prod = [0].concat(prod); - // Subtract product from remainder. - subtract(rem, prod, remL, base); - remL = rem.length; + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; - // If product was < remainder. - if (cmp == -1) { + // Either NaN? + if (!i || !j) return null; - // Compare divisor and new remainder. - // If divisor < new remainder, subtract divisor from remainder. - // Trial digit n too low. - // n is 1 too low about 5% of the time, and very rarely 2 too low. - while (compare(yc, rem, yL, remL) < 1) { - n++; + a = xc && !xc[0]; + b = yc && !yc[0]; - // Subtract divisor from remainder. - subtract(rem, yL < remL ? yz : yc, remL, base); - remL = rem.length; - } - } - } else if (cmp === 0) { - n++; - rem = [0]; - } // else cmp === 1 and n will be 0 + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; - // Add the next digit, n, to the result array. - qc[i++] = n; + // Signs differ? + if (i != j) return i; - // Update the remainder. - if (rem[0]) { - rem[remL++] = xc[xi] || 0; - } else { - rem = [xc[xi]]; - remL = 1; - } - } while ((xi++ < xL || rem[0] != null) && s--); + a = i < 0; + b = k == l; - more = rem[0] != null; + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; - // Leading zero? - if (!qc[0]) qc.splice(0, 1); - } + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; - if (base == BASE) { + j = (k = xc.length) < (l = yc.length) ? k : l; - // To calculate q.e, first get the number of digits of qc[0]. - for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; - round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } - // Caller is convertBase. - } else { - q.e = e; - q.r = +more; - } - return q; - }; - })(); + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } - /* - * Return a string representing the value of BigNumber n in fixed-point or exponential - * notation rounded to the specified decimal places or significant digits. - * - * n: a BigNumber. - * i: the index of the last digit required (i.e. the digit that may be rounded up). - * rm: the rounding mode. - * id: 1 (toExponential) or 2 (toPrecision). - */ - function format(n, i, rm, id) { - var c0, e, ne, len, str; + // Assumes finite n. 
+ function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - if (!n.c) return n.toString(); + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } - c0 = n.c[0]; - ne = n.e; - if (i == null) { - str = coeffToString(n.c); - str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) - ? toExponential(str, ne) - : toFixedPoint(str, ne, '0'); - } else { - n = round(new BigNumber(n), i, rm); + function toFixedPoint(str, e, z) { + var len, zs; - // n.e may have changed if the value was rounded up. - e = n.e; + // Negative exponent? + if (e < 0) { - str = coeffToString(n.c); - len = str.length; + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; - // toPrecision returns exponential notation if the number of significant digits - // specified is less than the number of digits necessary to represent the integer - // part of the value in fixed-point notation. + // Positive exponent + } else { + len = str.length; - // Exponential notation. - if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } - // Append zeros? - for (; len < i; str += '0', len++); - str = toExponential(str, e); + return str; + } - // Fixed-point notation. - } else { - i -= ne; - str = toFixedPoint(str, e, '0'); - // Append zeros? - if (e + 1 > len) { - if (--i > 0) for (str += '.'; i--; str += '0'); - } else { - i += e - len; - if (i > 0) { - if (e + 1 == len) str += '.'; - for (; i--; str += '0'); - } - } - } - } + // EXPORT - return n.s < 0 && c0 ? '-' + str : str; - } + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; - // Handle BigNumber.max and BigNumber.min. - // If any number is NaN, return NaN. - function maxOrMin(args, n) { - var k, y, - i = 1, - x = new BigNumber(args[0]); + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); - for (; i < args.length; i++) { - y = new BigNumber(args[i]); - if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { - x = y; - } - } + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; - return x; + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; } + globalObject.BigNumber = BigNumber; + } +})(this); - /* - * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. - * Called by minus, plus and times. - */ - function normalise(n, c, e) { - var i = 1, - j = c.length; - // Remove trailing zeros. - for (; !c[--j]; c.pop()); +/***/ }), - // Calculate the base 10 exponent. First get the number of digits of c[0]. - for (j = c[0]; j >= 10; j /= 10, i++); +/***/ 33717: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Overflow? - if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { +var concatMap = __nccwpck_require__(86891); +var balanced = __nccwpck_require__(9417); - // Infinity. - n.c = n.e = null; +module.exports = expandTop; - // Underflow? 
- } else if (e < MIN_EXP) { +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; - // Zero. - n.c = [n.e = 0]; - } else { - n.e = e; - n.c = c; - } +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} - return n; - } +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} - // Handle values that fail the validity test in BigNumber. - parseNumeric = (function () { - var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, - dotAfter = /^([^.]+)\.$/, - dotBefore = /^\.([^.]+)$/, - isInfinityOrNaN = /^-?(Infinity|NaN)$/, - whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; - return function (x, str, isNum, b) { - var base, - s = isNum ? str : str.replace(whitespaceOrPlus, ''); +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; - // No exception on ±Infinity or NaN. - if (isInfinityOrNaN.test(s)) { - x.s = isNaN(s) ? null : s < 0 ? -1 : 1; - } else { - if (!isNum) { + var parts = []; + var m = balanced('{', '}', str); - // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i - s = s.replace(basePrefix, function (m, p1, p2) { - base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; - return !b || b == base ? p1 : m; - }); + if (!m) + return str.split(','); - if (b) { - base = b; + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); - // E.g. '1.' to '1', '.1' to '0.1' - s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); - } + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } - if (str != s) return new BigNumber(s, base); - } + parts.push.apply(parts, p); - // '[BigNumber Error] Not a number: {n}' - // '[BigNumber Error] Not a base {b} number: {n}' - if (BigNumber.DEBUG) { - throw Error - (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); - } + return parts; +} - // NaN - x.s = null; - } +function expandTop(str) { + if (!str) + return []; - x.c = x.e = null; - } - })(); + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + return expand(escapeBraces(str), true).map(unescapeBraces); +} - /* - * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. - * If r is truthy, it is known that there are more digits after the rounding digit. 
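expandTop() above is the exported entry point of the bundled brace-expansion module (webpack id 33717): it escapes backslashed metacharacters, keeps a leading '{}' literal to match Bash, and hands the rest to expand(), which follows. An illustrative sketch (not part of this diff), assuming the package is required by name:

    // Sketch only: Bash-style brace expansion as implemented by the module above.
    const expand = require('brace-expansion'); // assumption: same package resolved by name

    expand('file-{a,b,c}.txt'); // ['file-a.txt', 'file-b.txt', 'file-c.txt']
    expand('img{1..3}.png');    // ['img1.png', 'img2.png', 'img3.png']
    expand('a{00..02}b');       // ['a00b', 'a01b', 'a02b'] (zero-padded numeric sequence)
    expand('{},a}b');           // ['{},a}b'] (the leading '{}' rule described in the comment above)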
- */ - function round(x, sd, rm, r) { - var d, i, j, k, n, ni, rd, - xc = x.c, - pows10 = POWS_TEN; +function identity(e) { + return e; +} - // if x is not Infinity or NaN... - if (xc) { +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} - // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. - // n is a base 1e14 number, the value of the element of array x.c containing rd. - // ni is the index of n within x.c. - // d is the number of digits of n. - // i is the index of rd within n including leading zeros. - // j is the actual index of rd within n (if < 0, rd is a leading zero). - out: { +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} - // Get the number of digits of the first element of xc. - for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); - i = sd - d; +function expand(str, isTop) { + var expansions = []; - // If the rounding digit is in the first element of xc... - if (i < 0) { - i += LOG_BASE; - j = sd; - n = xc[ni = 0]; + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; - // Get the rounding digit at index j of n. - rd = mathfloor(n / pows10[d - j - 1] % 10); - } else { - ni = mathceil((i + 1) / LOG_BASE); + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } - if (ni >= xc.length) { + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } - if (r) { + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. - // Needed by sqrt. - for (; xc.length <= ni; xc.push(0)); - n = rd = 0; - d = 1; - i %= LOG_BASE; - j = i - LOG_BASE + 1; - } else { - break out; - } - } else { - n = k = xc[ni]; + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; - // Get the number of digits of n. - for (d = 1; k >= 10; k /= 10, d++); + var N; - // Get the index of rd within n. - i %= LOG_BASE; + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); - // Get the index of rd within n, adjusted for leading zeros. - // The number of leading zeros of n is given by LOG_BASE - d. - j = i - LOG_BASE + d; + N = []; - // Get the rounding digit at index j of n. - rd = j < 0 ? 
0 : mathfloor(n / pows10[d - j - 1] % 10); - } + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } - r = r || sd < 0 || - - // Are there any non-zero digits after the rounding digit? - // The expression n % pows10[d - j - 1] returns all digits of n to the right - // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. - xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } - r = rm < 4 - ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) - : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + return expansions; +} - // Check whether the digit to the left of the rounding digit is odd. - ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || - rm == (x.s < 0 ? 8 : 7)); - if (sd < 1 || !xc[0]) { - xc.length = 0; - if (r) { +/***/ }), - // Convert sd to decimal places. - sd -= x.e + 1; +/***/ 9239: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 1, 0.1, 0.01, 0.001, 0.0001 etc. - xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; - x.e = -sd || 0; - } else { +"use strict"; +/*jshint node:true */ - // Zero. - xc[0] = x.e = 0; - } +var Buffer = (__nccwpck_require__(14300).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(14300).SlowBuffer); - return x; - } +module.exports = bufferEq; - // Remove excess digits. - if (i == 0) { - xc.length = ni; - k = 1; - ni--; - } else { - xc.length = ni + 1; - k = pows10[LOG_BASE - i]; +function bufferEq(a, b) { - // E.g. 56700 becomes 56000 if 7 is the rounding digit. - // j > 0 means i > number of leading zeros of n. - xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; - } + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } - // Round up? - if (r) { + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. + if (a.length !== b.length) { + return false; + } - for (; ;) { + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} - // If the digit to be rounded up is in the first element of xc... - if (ni == 0) { +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; - // i will be the length of xc[0] before k is added. - for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); - j = xc[0] += k; - for (k = 1; j >= 10; j /= 10, k++); +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; - // if i != k the length has increased. 
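bufferEq() above (webpack id 9239, buffer-equal-constant-time) rejects non-Buffer arguments, compares lengths directly (sizes are not treated as secret), and otherwise XOR-accumulates every byte so the time taken does not depend on where the first mismatch occurs. An illustrative sketch (not part of this diff):

    // Sketch only: constant-time buffer comparison.
    const bufferEq = require('buffer-equal-constant-time'); // assumption: same package resolved by name

    bufferEq(Buffer.from('abc'), Buffer.from('abc')); // true
    bufferEq(Buffer.from('abc'), Buffer.from('abd')); // false (every byte is still scanned)
    bufferEq('abc', Buffer.from('abc'));              // false (non-Buffer input short-circuits)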
- if (i != k) { - x.e++; - if (xc[0] == BASE) xc[0] = 1; - } - break; - } else { - xc[ni] += k; - if (xc[ni] != BASE) break; - xc[ni--] = 0; - k = 1; - } - } - } +/***/ }), - // Remove trailing zeros. - for (i = xc.length; xc[--i] === 0; xc.pop()); - } +/***/ 85443: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Overflow? Infinity. - if (x.e > MAX_EXP) { - x.c = x.e = null; +var util = __nccwpck_require__(73837); +var Stream = (__nccwpck_require__(12781).Stream); +var DelayedStream = __nccwpck_require__(18611); - // Underflow? Zero. - } else if (x.e < MIN_EXP) { - x.c = [x.e = 0]; - } - } +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; - return x; - } + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); +CombinedStream.create = function(options) { + var combinedStream = new this(); - function valueOf(n) { - var str, - e = n.e; + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } - if (e === null) return n.toString(); + return combinedStream; +}; - str = coeffToString(n.c); +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; - str = e <= TO_EXP_NEG || e >= TO_EXP_POS - ? toExponential(str, e) - : toFixedPoint(str, e, '0'); +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); - return n.s < 0 ? '-' + str : str; + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; } + this._handleErrors(stream); - // PROTOTYPE/INSTANCE METHODS - - - /* - * Return a new BigNumber whose value is the absolute value of this BigNumber. - */ - P.absoluteValue = P.abs = function () { - var x = new BigNumber(this); - if (x.s < 0) x.s = 1; - return x; - }; + if (this.pauseStreams) { + stream.pause(); + } + } + this._streams.push(stream); + return this; +}; - /* - * Return - * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), - * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), - * 0 if they have the same value, - * or null if the value of either is NaN. - */ - P.comparedTo = function (y, b) { - return compare(this, new BigNumber(y, b)); - }; +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; +CombinedStream.prototype._getNext = function() { + this._currentStream = null; - /* - * If dp is undefined or null or true or false, return the number of decimal places of the - * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. - * - * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this - * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or - * ROUNDING_MODE if rm is omitted. - * - * [dp] {number} Decimal places: integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. 
- * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.decimalPlaces = P.dp = function (dp, rm) { - var c, n, v, - x = this; + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } - if (dp != null) { - intCheck(dp, 0, MAX); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; - return round(new BigNumber(x), dp + x.e + 1, rm); - } +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); - if (!(c = x.c)) return null; - n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; - // Subtract the number of trailing zeros of the last number. - if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); - if (n < 0) n = 0; + if (typeof stream == 'undefined') { + this.end(); + return; + } - return n; - }; + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; + } + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } - /* - * n / 0 = I - * n / N = N - * n / I = 0 - * 0 / n = 0 - * 0 / 0 = N - * 0 / N = N - * 0 / I = 0 - * N / n = N - * N / 0 = N - * N / N = N - * N / I = N - * I / n = I - * I / 0 = I - * I / N = N - * I / I = N - * - * Return a new BigNumber whose value is the value of this BigNumber divided by the value of - * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. - */ - P.dividedBy = P.div = function (y, b) { - return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); - }; + this._pipeNext(stream); + }.bind(this)); +}; +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; - /* - * Return a new BigNumber whose value is the integer part of dividing the value of this - * BigNumber by the value of BigNumber(y, b). - */ - P.dividedToIntegerBy = P.idiv = function (y, b) { - return div(this, new BigNumber(y, b), 0, 1); - }; + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: false}); + return; + } + var value = stream; + this.write(value); + this._getNext(); +}; - /* - * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. - * - * If m is present, return the result modulo m. - * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. - * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. - * - * The modular power operation works efficiently when x, n, and m are integers, otherwise it - * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. - * - * n {number|string|BigNumber} The exponent. An integer. - * [m] {number|string|BigNumber} The modulus. 
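CombinedStream above queues its sources in _streams: append() wraps readable streams in DelayedStream (paused while pauseStreams is set) and stores plain values as-is, while _getNext()/_pipeNext() drain the queue strictly in append order, writing non-stream values directly. An illustrative sketch (not part of this diff; the file names are hypothetical):

    // Sketch only: sources are emitted in the order they were appended.
    const CombinedStream = require('combined-stream'); // assumption: same package resolved by name
    const fs = require('fs');

    const combined = CombinedStream.create();
    combined.append(fs.createReadStream('part1.txt')); // hypothetical file
    combined.append('\n--- separator ---\n');          // non-stream values are written verbatim
    combined.append(fs.createReadStream('part2.txt')); // hypothetical file
    combined.pipe(process.stdout);                     // pipe() resumes the queue and starts draining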
- * - * '[BigNumber Error] Exponent not an integer: {n}' - */ - P.exponentiatedBy = P.pow = function (n, m) { - var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, - x = this; +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; - n = new BigNumber(n); +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; - // Allow NaN and ±Infinity, but not other non-integers. - if (n.c && !n.isInteger()) { - throw Error - (bignumberError + 'Exponent not an integer: ' + valueOf(n)); - } +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; + } - if (m != null) m = new BigNumber(m); + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; - // Exponent of MAX_SAFE_INTEGER is 15. - nIsBig = n.e > 14; +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } - // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. - if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; - // The sign of the result of pow when x is negative depends on the evenness of n. - // If +n overflows to ±Infinity, the evenness of n would be not be known. - y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); - return m ? y.mod(m) : y; - } +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; - nIsNeg = n.s < 0; +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; - if (m) { +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; - // x % m returns NaN if abs(m) is zero, or m is NaN. - if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } - isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; - if (isModExp) x = x.mod(m); +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; - // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. - // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. - } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 - // [1, 240000000] - ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 - // [80000000000000] [99999750000000] - : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } - // If x is negative and n is odd, k = -0, else k = 0. - k = x.s < 0 && isOdd(n) ? -0 : 0; + self.dataSize += stream.dataSize; + }); - // If x >= 1, k = ±Infinity. - if (x.e > -1) k = 1 / k; + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; - // If n is negative return ±0, else return ±Infinity. - return new BigNumber(nIsNeg ? 
1 / k : k); +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; - } else if (POW_PRECISION) { - // Truncating each coefficient array to a length of k after each multiplication - // equates to truncating significant digits to POW_PRECISION + [28, 41], - // i.e. there will be a minimum of 28 guard digits retained. - k = mathceil(POW_PRECISION / LOG_BASE + 2); - } +/***/ }), - if (nIsBig) { - half = new BigNumber(0.5); - if (nIsNeg) n.s = 1; - nIsOdd = isOdd(n); - } else { - i = Math.abs(+valueOf(n)); - nIsOdd = i % 2; - } +/***/ 86891: +/***/ ((module) => { - y = new BigNumber(ONE); +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; - // Performs 54 loop iterations for n of 9007199254740991. - for (; ;) { +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; - if (nIsOdd) { - y = y.times(x); - if (!y.c) break; - if (k) { - if (y.c.length > k) y.c.length = k; - } else if (isModExp) { - y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); - } - } +/***/ }), - if (i) { - i = mathfloor(i / 2); - if (i === 0) break; - nIsOdd = i % 2; - } else { - n = n.times(half); - round(n, n.e + 1, 1); +/***/ 28222: +/***/ ((module, exports, __nccwpck_require__) => { - if (n.e > 14) { - nIsOdd = isOdd(n); - } else { - i = +valueOf(n); - if (i === 0) break; - nIsOdd = i % 2; - } - } +/* eslint-env browser */ - x = x.times(x); +/** + * This is the web browser implementation of `debug()`. + */ - if (k) { - if (x.c && x.c.length > k) x.c.length = k; - } else if (isModExp) { - x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); - } - } +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; - if (isModExp) return y; - if (nIsNeg) y = ONE.div(y); + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); - return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; - }; +/** + * Colors. 
+ */ +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; - /* - * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer - * using rounding mode rm, or ROUNDING_MODE if rm is omitted. - * - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' - */ - P.integerValue = function (rm) { - var n = new BigNumber(this); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - return round(n, n.e + 1, rm); - }; +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } - /* - * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), - * otherwise return false. - */ - P.isEqualTo = P.eq = function (y, b) { - return compare(this, new BigNumber(y, b)) === 0; - }; + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? 
+ // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} - /* - * Return true if the value of this BigNumber is a finite number, otherwise return false. - */ - P.isFinite = function () { - return !!this.c; - }; +/** + * Colorize log arguments if enabled. + * + * @api public + */ +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); - /* - * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), - * otherwise return false. - */ - P.isGreaterThan = P.gt = function (y, b) { - return compare(this, new BigNumber(y, b)) > 0; - }; + if (!this.useColors) { + return; + } + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); - /* - * Return true if the value of this BigNumber is greater than or equal to the value of - * BigNumber(y, b), otherwise return false. - */ - P.isGreaterThanOrEqualTo = P.gte = function (y, b) { - return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); - }; + args.splice(lastC, 0, c); +} +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); - /* - * Return true if the value of this BigNumber is an integer, otherwise return false. - */ - P.isInteger = function () { - return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; - }; +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } - /* - * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), - * otherwise return false. 
- */ - P.isLessThan = P.lt = function (y, b) { - return compare(this, new BigNumber(y, b)) < 0; - }; + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + return r; +} - /* - * Return true if the value of this BigNumber is less than or equal to the value of - * BigNumber(y, b), otherwise return false. - */ - P.isLessThanOrEqualTo = P.lte = function (y, b) { - return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; - }; +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} - /* - * Return true if the value of this BigNumber is NaN, otherwise return false. - */ - P.isNaN = function () { - return !this.s; - }; +module.exports = __nccwpck_require__(46243)(exports); +const {formatters} = module.exports; - /* - * Return true if the value of this BigNumber is negative, otherwise return false. - */ - P.isNegative = function () { - return this.s < 0; - }; +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; - /* - * Return true if the value of this BigNumber is positive, otherwise return false. - */ - P.isPositive = function () { - return this.s > 0; - }; +/***/ }), - /* - * Return true if the value of this BigNumber is 0 or -0, otherwise return false. - */ - P.isZero = function () { - return !!this.c && this.c[0] == 0; - }; +/***/ 46243: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /* - * n - 0 = n - * n - N = N - * n - I = -I - * 0 - n = -n - * 0 - 0 = 0 - * 0 - N = N - * 0 - I = -I - * N - n = N - * N - 0 = N - * N - N = N - * N - I = N - * I - n = I - * I - 0 = I - * I - N = N - * I - I = N - * - * Return a new BigNumber whose value is the value of this BigNumber minus the value of - * BigNumber(y, b). - */ - P.minus = function (y, b) { - var i, j, t, xLTy, - x = this, - a = x.s; +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ - y = new BigNumber(y, b); - b = y.s; +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(80900); + createDebug.destroy = destroy; - // Either NaN? - if (!a || !b) return new BigNumber(NaN); + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); - // Signs differ? - if (a != b) { - y.s = -b; - return x.plus(y); - } + /** + * The currently active debug mode names, and names to skip. + */ - var xe = x.e / LOG_BASE, - ye = y.e / LOG_BASE, - xc = x.c, - yc = y.c; + createDebug.names = []; + createDebug.skips = []; - if (!xe || !ye) { + /** + * Map of special "%n" handling functions, for the debug "format" argument. 
+ * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; - // Either Infinity? - if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; - // Either zero? - if (!xc[0] || !yc[0]) { + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } - // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. - return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; - // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity - ROUNDING_MODE == 3 ? -0 : 0); - } - } + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; - xe = bitFloor(xe); - ye = bitFloor(ye); - xc = xc.slice(); + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } - // Determine which is the bigger number. - if (a = xe - ye) { + const self = debug; - if (xLTy = a < 0) { - a = -a; - t = xc; - } else { - ye = xe; - t = yc; - } + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; - t.reverse(); + args[0] = createDebug.coerce(args[0]); - // Prepend zeros to equalise exponents. - for (b = a; b--; t.push(0)); - t.reverse(); - } else { + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } - // Exponents equal. Check digit by digit. - j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); - for (a = b = 0; b < j; b++) { + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); - if (xc[b] != yc[b]) { - xLTy = xc[b] < yc[b]; - break; - } - } - } + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); - // x < y? Point xc to the array of the bigger number. - if (xLTy) { - t = xc; - xc = yc; - yc = t; - y.s = -y.s; - } + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } - b = (j = yc.length) - (i = xc.length); + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } - // Append zeros to xc if shorter. - // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. - if (b > 0) for (; b--; xc[i++] = 0); - b = BASE - 1; + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); - // Subtract yc from xc. - for (; j > a;) { + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } - if (xc[--j] < yc[j]) { - for (i = j; i && !xc[--i]; xc[i] = b); - --xc[i]; - xc[j] += BASE; - } + return debug; + } - xc[j] -= yc[j]; - } + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } - // Remove leading zeros and adjust exponent accordingly. - for (; xc[0] == 0; xc.splice(0, 1), --ye); + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; - // Zero? - if (!xc[0]) { + createDebug.names = []; + createDebug.skips = []; - // Following IEEE 754 (2008) 6.3, - // n - n = +0 but n - n = -0 when rounding towards -Infinity. - y.s = ROUNDING_MODE == 3 ? -1 : 1; - y.c = [y.e = 0]; - return y; - } + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; - // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity - // for finite x and y. - return normalise(y, xc, ye); - }; + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + namespaces = split[i].replace(/\*/g, '.*?'); - /* - * n % 0 = N - * n % N = N - * n % I = n - * 0 % n = 0 - * -0 % n = -0 - * 0 % 0 = N - * 0 % N = N - * 0 % I = 0 - * N % n = N - * N % 0 = N - * N % N = N - * N % I = N - * I % n = N - * I % 0 = N - * I % N = N - * I % I = N - * - * Return a new BigNumber whose value is the value of this BigNumber modulo the value of - * BigNumber(y, b). The result depends on the value of MODULO_MODE. - */ - P.modulo = P.mod = function (y, b) { - var q, s, - x = this; + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } - y = new BigNumber(y, b); + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } - // Return NaN if x is Infinity or NaN, or y is NaN or zero. - if (!x.c || !y.s || y.c && !y.c[0]) { - return new BigNumber(NaN); + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } - // Return x if y is Infinity or x is zero. 
- } else if (!y.c || x.c && !x.c[0]) { - return new BigNumber(x); - } + let i; + let len; - if (MODULO_MODE == 9) { + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } - // Euclidian division: q = sign(y) * floor(x / abs(y)) - // r = x - qy where 0 <= r < abs(y) - s = y.s; - y.s = 1; - q = div(x, y, 0, 3); - y.s = s; - q.s *= s; - } else { - q = div(x, y, 0, MODULO_MODE); - } + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } - y = x.minus(q.times(y)); + return false; + } - // To match JavaScript %, ensure sign of zero is sign of dividend. - if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } - return y; - }; + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } - /* - * n * 0 = 0 - * n * N = N - * n * I = I - * 0 * n = 0 - * 0 * 0 = 0 - * 0 * N = N - * 0 * I = N - * N * n = N - * N * 0 = N - * N * N = N - * N * I = N - * I * n = I - * I * 0 = N - * I * N = N - * I * I = I - * - * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value - * of BigNumber(y, b). - */ - P.multipliedBy = P.times = function (y, b) { - var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, - base, sqrtBase, - x = this, - xc = x.c, - yc = (y = new BigNumber(y, b)).c; + createDebug.enable(createDebug.load()); - // Either NaN, ±Infinity or ±0? - if (!xc || !yc || !xc[0] || !yc[0]) { + return createDebug; +} - // Return NaN if either is NaN, or one is 0 and the other is Infinity. - if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { - y.c = y.e = y.s = null; - } else { - y.s *= x.s; +module.exports = setup; - // Return ±Infinity if either is ±Infinity. - if (!xc || !yc) { - y.c = y.e = null; - // Return ±0 if either is ±0. - } else { - y.c = [0]; - y.e = 0; - } - } +/***/ }), - return y; - } +/***/ 38237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); - y.s *= x.s; - xcL = xc.length; - ycL = yc.length; +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ - // Ensure xc points to longer array and xcL to its length. - if (xcL < ycL) { - zc = xc; - xc = yc; - yc = zc; - i = xcL; - xcL = ycL; - ycL = i; - } +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(28222); +} else { + module.exports = __nccwpck_require__(35332); +} - // Initialise the result array with zeros. 
- for (i = xcL + ycL, zc = []; i--; zc.push(0)); - base = BASE; - sqrtBase = SQRT_BASE; +/***/ }), - for (i = ycL; --i >= 0;) { - c = 0; - ylo = yc[i] % sqrtBase; - yhi = yc[i] / sqrtBase | 0; +/***/ 35332: +/***/ ((module, exports, __nccwpck_require__) => { - for (k = xcL, j = i + k; j > i;) { - xlo = xc[--k] % sqrtBase; - xhi = xc[k] / sqrtBase | 0; - m = yhi * xlo + xhi * ylo; - xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; - c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; - zc[j--] = xlo % base; - } +/** + * Module dependencies. + */ - zc[j] = c; - } +const tty = __nccwpck_require__(76224); +const util = __nccwpck_require__(73837); - if (c) { - ++e; - } else { - zc.splice(0, 1); - } +/** + * This is the Node.js implementation of `debug()`. + */ - return normalise(y, zc, e); - }; +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); +/** + * Colors. + */ - /* - * Return a new BigNumber whose value is the value of this BigNumber negated, - * i.e. multiplied by -1. - */ - P.negated = function () { - var x = new BigNumber(this); - x.s = -x.s || null; - return x; - }; +exports.colors = [6, 2, 3, 4, 5, 1]; +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(59318); - /* - * n + 0 = n - * n + N = N - * n + I = I - * 0 + n = n - * 0 + 0 = 0 - * 0 + N = N - * 0 + I = I - * N + n = N - * N + 0 = N - * N + N = N - * N + I = N - * I + n = I - * I + 0 = I - * I + N = N - * I + I = I - * - * Return a new BigNumber whose value is the value of this BigNumber plus the value of - * BigNumber(y, b). - */ - P.plus = function (y, b) { - var t, - x = this, - a = x.s; + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} - y = new BigNumber(y, b); - b = y.s; +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ - // Either NaN? - if (!a || !b) return new BigNumber(NaN); +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); - // Signs differ? 
- if (a != b) { - y.s = -b; - return x.minus(y); - } + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } - var xe = x.e / LOG_BASE, - ye = y.e / LOG_BASE, - xc = x.c, - yc = y.c; + obj[prop] = val; + return obj; +}, {}); - if (!xe || !ye) { +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ - // Return ±Infinity if either ±Infinity. - if (!xc || !yc) return new BigNumber(a / 0); +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} - // Either zero? - // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. - if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0); - } +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ - xe = bitFloor(xe); - ye = bitFloor(ye); - xc = xc.slice(); +function formatArgs(args) { + const {namespace: name, useColors} = this; - // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. - if (a = xe - ye) { - if (a > 0) { - ye = xe; - t = yc; - } else { - a = -a; - t = xc; - } + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; - t.reverse(); - for (; a--; t.push(0)); - t.reverse(); - } + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} - a = xc.length; - b = yc.length; +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} - // Point xc to the longer array, and b to the shorter length. - if (a - b < 0) { - t = yc; - yc = xc; - xc = t; - b = a; - } +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ - // Only start adding at yc.length - 1 as the further digits of xc can be ignored. - for (a = 0; b;) { - a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; - xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; - } +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} - if (a) { - xc = [a].concat(xc); - ++ye; - } +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} - // No need to check for zero, as +x + +y != 0 && -x + -y != 0 - // ye = MAX_EXP + 1 possible - return normalise(y, xc, ye); - }; +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + return process.env.DEBUG; +} - /* - * If sd is undefined or null or true or false, return the number of significant digits of - * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. - * If sd is true include integer-part trailing zeros in the count. 
- * - * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this - * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or - * ROUNDING_MODE if rm is omitted. - * - * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. - * boolean: whether to count integer-part trailing zeros: true or false. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' - */ - P.precision = P.sd = function (sd, rm) { - var c, n, v, - x = this; +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ - if (sd != null && sd !== !!sd) { - intCheck(sd, 1, MAX); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); +function init(debug) { + debug.inspectOpts = {}; - return round(new BigNumber(x), sd, rm); - } + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} - if (!(c = x.c)) return null; - v = c.length - 1; - n = v * LOG_BASE + 1; +module.exports = __nccwpck_require__(46243)(exports); - if (v = c[v]) { +const {formatters} = module.exports; - // Subtract the number of trailing zeros of the last element. - for (; v % 10 == 0; v /= 10, n--); +/** + * Map %o to `util.inspect()`, all on a single line. + */ - // Add the number of digits of the first element. - for (v = c[0]; v >= 10; v /= 10, n++); - } +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; - if (sd && x.e + 1 > n) n = x.e + 1; +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ - return n; - }; +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; - /* - * Return a new BigNumber whose value is the value of this BigNumber shifted by k places - * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. - * - * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' - */ - P.shiftedBy = function (k) { - intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); - return this.times('1e' + k); - }; +/***/ }), +/***/ 18611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /* - * sqrt(-n) = N - * sqrt(N) = N - * sqrt(-I) = N - * sqrt(I) = I - * sqrt(0) = 0 - * sqrt(-0) = -0 - * - * Return a new BigNumber whose value is the square root of the value of this BigNumber, - * rounded according to DECIMAL_PLACES and ROUNDING_MODE. - */ - P.squareRoot = P.sqrt = function () { - var m, n, r, rep, t, - x = this, - c = x.c, - s = x.s, - e = x.e, - dp = DECIMAL_PLACES + 4, - half = new BigNumber('0.5'); +var Stream = (__nccwpck_require__(12781).Stream); +var util = __nccwpck_require__(73837); - // Negative/NaN/Infinity/zero? - if (s !== 1 || !c || !c[0]) { - return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); - } +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; - // Initial estimate. 
- s = Math.sqrt(+valueOf(x)); + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); - // Math.sqrt underflow/overflow? - // Pass x to Math.sqrt as integer, then adjust the exponent of the result. - if (s == 0 || s == 1 / 0) { - n = coeffToString(c); - if ((n.length + e) % 2 == 0) n += '0'; - s = Math.sqrt(+n); - e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); +DelayedStream.create = function(source, options) { + var delayedStream = new this(); - if (s == 1 / 0) { - n = '5e' + e; - } else { - n = s.toExponential(); - n = n.slice(0, n.indexOf('e') + 1) + e; - } + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } - r = new BigNumber(n); - } else { - r = new BigNumber(s + ''); - } + delayedStream.source = source; - // Check for zero. - // r could be zero if MIN_EXP is changed after the this value was created. - // This would cause a division by zero (x/t) and hence Infinity below, which would cause - // coeffToString to throw. - if (r.c[0]) { - e = r.e; - s = e + dp; - if (s < 3) s = 0; + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; - // Newton-Raphson iteration. - for (; ;) { - t = r; - r = half.times(t.plus(div(x, t, dp, 1))); + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } - if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + return delayedStream; +}; - // The exponent of r may here be one less than the final result exponent, - // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits - // are indexed correctly. - if (r.e < e) --s; - n = n.slice(s - 3, s + 1); +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; + } +}); - // The 4th rounding digit may be in error by -1 so if the 4 rounding digits - // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the - // iteration. - if (n == '9999' || !rep && n == '4999') { +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; - // On the first iteration only, check to see if rounding up gives the - // exact result as the nines may infinitely repeat. - if (!rep) { - round(t, t.e + DECIMAL_PLACES + 2, 0); +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } - if (t.times(t).eq(x)) { - r = t; - break; - } - } + this.source.resume(); +}; - dp += 4; - s += 4; - rep = 1; - } else { +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; - // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact - // result. If not, then there are further digits and m will be truthy. - if (!+n || !+n.slice(1) && n.charAt(0) == '5') { +DelayedStream.prototype.release = function() { + this._released = true; - // Truncate to the first rounding digit. 
- round(r, r.e + DECIMAL_PLACES + 2, 1); - m = !r.times(r).eq(x); - } + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; - break; - } - } - } - } +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; - return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); - }; +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } - /* - * Return a string representing the value of this BigNumber in exponential notation and - * rounded using ROUNDING_MODE to dp fixed decimal places. - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.toExponential = function (dp, rm) { - if (dp != null) { - intCheck(dp, 0, MAX); - dp++; - } - return format(this, dp, rm, 1); - }; + this._bufferedEvents.push(args); +}; +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; + } - /* - * Return a string representing the value of this BigNumber in fixed-point notation rounding - * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. - * - * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', - * but e.g. (-0.00001).toFixed(0) is '-0'. - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.toFixed = function (dp, rm) { - if (dp != null) { - intCheck(dp, 0, MAX); - dp = dp + this.e + 1; - } - return format(this, dp, rm); - }; + if (this.dataSize <= this.maxDataSize) { + return; + } + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' + this.emit('error', new Error(message)); +}; - /* - * Return a string representing the value of this BigNumber in fixed-point notation rounded - * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties - * of the format or FORMAT object (see BigNumber.set). - * - * The formatting object may contain some or all of the properties shown below. - * - * FORMAT = { - * prefix: '', - * groupSize: 3, - * secondaryGroupSize: 0, - * groupSeparator: ',', - * decimalSeparator: '.', - * fractionGroupSize: 0, - * fractionGroupSeparator: '\xA0', // non-breaking space - * suffix: '' - * }; - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * [format] {object} Formatting options. See FORMAT pbject above. 
- * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - * '[BigNumber Error] Argument not an object: {format}' - */ - P.toFormat = function (dp, rm, format) { - var str, - x = this; - if (format == null) { - if (dp != null && rm && typeof rm == 'object') { - format = rm; - rm = null; - } else if (dp && typeof dp == 'object') { - format = dp; - dp = rm = null; - } else { - format = FORMAT; - } - } else if (typeof format != 'object') { - throw Error - (bignumberError + 'Argument not an object: ' + format); - } +/***/ }), - str = x.toFixed(dp, rm); +/***/ 58932: +/***/ ((__unused_webpack_module, exports) => { - if (x.c) { - var i, - arr = str.split('.'), - g1 = +format.groupSize, - g2 = +format.secondaryGroupSize, - groupSeparator = format.groupSeparator || '', - intPart = arr[0], - fractionPart = arr[1], - isNeg = x.s < 0, - intDigits = isNeg ? intPart.slice(1) : intPart, - len = intDigits.length; +"use strict"; - if (g2) { - i = g1; - g1 = g2; - g2 = i; - len -= i; - } - if (g1 > 0 && len > 0) { - i = len % g1 || g1; - intPart = intDigits.substr(0, i); - for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); - if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); - if (isNeg) intPart = '-' + intPart; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - str = fractionPart - ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) - ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), - '$&' + (format.fractionGroupSeparator || '')) - : fractionPart) - : intPart; - } +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) - return (format.prefix || '') + str + (format.suffix || ''); - }; + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - /* - * Return an array of two BigNumbers representing the value of this BigNumber as a simple - * fraction with an integer numerator and an integer denominator. - * The denominator will be a positive non-zero value less than or equal to the specified - * maximum denominator. If a maximum denominator is not specified, the denominator will be - * the lowest value necessary to represent the number exactly. - * - * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. - * - * '[BigNumber Error] Argument {not an integer|out of range} : {md}' - */ - P.toFraction = function (md) { - var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, - x = this, - xc = x.c; + this.name = 'Deprecation'; + } - if (md != null) { - n = new BigNumber(md); +} - // Throw if md is less than one or is not an integer, unless it is Infinity. - if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { - throw Error - (bignumberError + 'Argument ' + - (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); - } - } +exports.Deprecation = Deprecation; - if (!xc) return new BigNumber(x); - d = new BigNumber(ONE); - n1 = d0 = new BigNumber(ONE); - d1 = n0 = new BigNumber(ONE); - s = coeffToString(xc); +/***/ }), - // Determine initial denominator. - // d is a power of 10 and the minimum max denominator that specifies the value exactly. - e = d.e = s.length - x.e - 1; - d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; - md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; +/***/ 76599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - exp = MAX_EXP; - MAX_EXP = 1 / 0; - n = new BigNumber(s); +var stream = __nccwpck_require__(51642) +var eos = __nccwpck_require__(81205) +var inherits = __nccwpck_require__(44124) +var shift = __nccwpck_require__(66121) - // n0 = d1 = 0 - n0.c[0] = 0; +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) - for (; ;) { - q = div(n, d, 0, 1); - d2 = d0.plus(q.times(d1)); - if (d2.comparedTo(md) == 1) break; - d0 = d1; - d1 = d2; - n1 = n0.plus(q.times(d2 = n1)); - n0 = d2; - d = n.minus(q.times(d2 = d)); - n = d2; - } +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} - d2 = div(md.minus(d0), d1, 0, 1); - n0 = n0.plus(d2.times(n1)); - d0 = d0.plus(d2.times(d1)); - n0.s = n1.s = x.s; - e = e * 2; +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? null : err) + else if (end && !self._ended) self.end() + } +} - // Determine which fraction is closer to x, n0/d0 or n1/d1 - r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( - div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} - MAX_EXP = exp; +var noop = function() {} - return r; - }; +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) - /* - * Return the value of this BigNumber converted to a number primitive. - */ - P.toNumber = function () { - return +valueOf(this); - }; + this._writable = null + this._readable = null + this._readable2 = null + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false - /* - * Return a string representing the value of this BigNumber rounded to sd significant digits - * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits - * necessary to represent the integer part of the value in fixed-point notation, then use - * exponential notation. - * - * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' - */ - P.toPrecision = function (sd, rm) { - if (sd != null) intCheck(sd, 1, MAX); - return format(this, sd, rm, 2); - }; + this.destroyed = false + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} - /* - * Return a string representing the value of this BigNumber in base b, or base 10 if b is - * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and - * ROUNDING_MODE. 
If a base is not specified, and this BigNumber has a positive exponent - * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than - * TO_EXP_NEG, return exponential notation. - * - * [b] {number} Integer, 2 to ALPHABET.length inclusive. - * - * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' - */ - P.toString = function (b) { - var str, - n = this, - s = n.s, - e = n.e; +inherits(Duplexify, stream.Duplex) - // Infinity or NaN? - if (e === null) { - if (s) { - str = 'Infinity'; - if (s < 0) str = '-' + str; - } else { - str = 'NaN'; - } - } else { - if (b == null) { - str = e <= TO_EXP_NEG || e >= TO_EXP_POS - ? toExponential(coeffToString(n.c), e) - : toFixedPoint(coeffToString(n.c), e, '0'); - } else if (b === 10 && alphabetHasNormalDecimalDigits) { - n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); - str = toFixedPoint(coeffToString(n.c), n.e, '0'); - } else { - intCheck(b, 2, ALPHABET.length, 'Base'); - str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); - } +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} - if (s < 0 && n.c[0]) str = '-' + str; - } +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} - return str; - }; +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() - /* - * Return as toString, but do not accept a base argument, and include the minus sign for - * negative zero. - */ - P.valueOf = P.toJSON = function () { - return valueOf(this); - }; + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + if (writable === null || writable === false) { + this.end() + return + } - P._isBigNumber = true; + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) - if (configObject != null) BigNumber.set(configObject); + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } - return BigNumber; + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() } + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks - // PRIVATE HELPER FUNCTIONS + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear - // These functions don't need access to variables, - // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + this.uncork() // always uncork setWritable +} +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() - function bitFloor(n) { - var i = n | 0; - return n > 0 || n === i ? i : i - 1; + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return } + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } - // Return a coefficient array as a string of base 10 digits. - function coeffToString(a) { - var s, z, - i = 1, - j = a.length, - r = a[0] + ''; - - for (; i < j;) { - s = a[i++] + ''; - z = LOG_BASE - s.length; - for (; z--; s = '0' + s); - r += s; - } + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) - // Determine trailing zeros. 
- for (j = r.length; r.charCodeAt(--j) === 48;); + var onreadable = function() { + self._forward() + } - return r.slice(0, j + 1 || 1); + var onend = function() { + self.push(null) } + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } - // Compare the value of BigNumbers x and y. - function compare(x, y) { - var a, b, - xc = x.c, - yc = y.c, - i = x.s, - j = y.s, - k = x.e, - l = y.e; + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear - // Either NaN? - if (!i || !j) return null; + this._forward() +} - a = xc && !xc[0]; - b = yc && !yc[0]; +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} - // Either zero? - if (a || b) return a ? b ? 0 : -j : i; +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true - // Signs differ? - if (i != j) return i; + var data - a = i < 0; - b = k == l; + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } - // Either Infinity? - if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + this._forwarding = false +} - // Compare exponents. - if (!b) return k > l ^ a ? 1 : -1; +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true - j = (k = xc.length) < (l = yc.length) ? k : l; + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} - // Compare digit by digit. - for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } - // Compare lengths. - return k == l ? 0 : k > l ^ a ? 1 : -1; + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() } + this.emit('close') +} - /* - * Check that n is a primitive number, an integer, and in range, otherwise throw. - */ - function intCheck(n, min, max, name) { - if (n < min || n > max || n !== mathfloor(n)) { - throw Error - (bignumberError + (name || 'Argument') + (typeof n == 'number' - ? n < min || n > max ? ' out of range: ' : ' not an integer: ' - : ' not a primitive number: ') + String(n)); - } - } +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} - // Assumes finite n. 
- function isOdd(n) { - var k = n.c.length - 1; - return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; - } +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} - function toExponential(str, e) { - return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + - (e < 0 ? 'e' : 'e+') + e; - } +module.exports = Duplexify - function toFixedPoint(str, e, z) { - var len, zs; +/***/ }), - // Negative exponent? - if (e < 0) { +/***/ 11728: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Prepend zeros. - for (zs = z + '.'; ++e; zs += z); - str = zs + str; +"use strict"; - // Positive exponent - } else { - len = str.length; - // Append zeros. - if (++e > len) { - for (zs = z, e -= len; --e; zs += z); - str += zs; - } else if (e < len) { - str = str.slice(0, e) + '.' + str.slice(e); - } - } +var Buffer = (__nccwpck_require__(21867).Buffer); - return str; - } +var getParamBytesForAlg = __nccwpck_require__(30528); +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); - // EXPORT +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } - BigNumber = clone(); - BigNumber['default'] = BigNumber.BigNumber = BigNumber; + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} - // AMD. - if (typeof define == 'function' && define.amd) { - define(function () { return BigNumber; }); +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); - // Node.js and other environments that support module.exports. - } else if ( true && module.exports) { - module.exports = BigNumber; + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; - // Browser. - } else { - if (!globalObject) { - globalObject = typeof self != 'undefined' && self ? 
self : window; - } + var inputLength = signature.length; - globalObject.BigNumber = BigNumber; - } -})(this); + var offset = 0; + if (signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } -/***/ }), + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } -/***/ 3717: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } -var concatMap = __nccwpck_require__(6891); -var balanced = __nccwpck_require__(9417); + var rLength = signature[offset++]; -module.exports = expandTop; + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} + var rOffset = offset; + offset += rLength; -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} + var sLength = signature[offset++]; + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } - var parts = []; - var m = balanced('{', '}', str); + var sOffset = offset; + offset += sLength; - if (!m) - return str.split(','); + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); - parts.push.apply(parts, p); + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); - return parts; + 
offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; } -function expandTop(str) { - if (!str) - return []; +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } - return expand(escapeBraces(str), true).map(unescapeBraces); + return padding; } -function identity(e) { - return e; -} +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; -function expand(str, isTop) { - var expansions = []; + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; + var shortLength = rsBytes < MAX_OCTET; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. + return dst; +} - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; - var N; - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); +/***/ }), - N = []; +/***/ 30528: +/***/ ((module) => { - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } +"use strict"; - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - return expansions; +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); } +module.exports = getParamBytesForAlg; /***/ }), -/***/ 9239: +/***/ 81205: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -/*jshint node:true */ +var once = __nccwpck_require__(1223); -var Buffer = (__nccwpck_require__(4300).Buffer); // browserify -var SlowBuffer = (__nccwpck_require__(4300).SlowBuffer); +var noop = function() {}; -module.exports = bufferEq; +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; -function bufferEq(a, b) { +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; - // shortcutting on type is necessary for correctness - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - return false; - } +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; - // buffer sizes should be well-known information, so despite this - // shortcutting, it doesn't leak any information about the *contents* of the - // buffers. 
- if (a.length !== b.length) { - return false; - } + callback = once(callback || noop); - var c = 0; - for (var i = 0; i < a.length; i++) { - /*jshint bitwise:false */ - c |= a[i] ^ b[i]; // XOR - } - return c === 0; -} + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; -bufferEq.install = function() { - Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { - return bufferEq(this, that); - }; -}; + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; -var origBufEqual = Buffer.prototype.equal; -var origSlowBufEqual = SlowBuffer.prototype.equal; -bufferEq.restore = function() { - Buffer.prototype.equal = origBufEqual; - SlowBuffer.prototype.equal = origSlowBufEqual; -}; + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; -/***/ }), + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; -/***/ 5443: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var onerror = function(err) { + callback.call(stream, err); + }; -var util = __nccwpck_require__(3837); -var Stream = (__nccwpck_require__(2781).Stream); -var DelayedStream = __nccwpck_require__(8611); + var onclose = function() { + process.nextTick(onclosenexttick); + }; -module.exports = CombinedStream; -function CombinedStream() { - this.writable = false; - this.readable = true; - this.dataSize = 0; - this.maxDataSize = 2 * 1024 * 1024; - this.pauseStreams = true; + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; - this._released = false; - this._streams = []; - this._currentStream = null; - this._insideLoop = false; - this._pendingNext = false; -} -util.inherits(CombinedStream, Stream); + var onrequest = function() { + stream.req.on('finish', onfinish); + }; -CombinedStream.create = function(options) { - var combinedStream = new this(); + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } - options = options || {}; - for (var option in options) { - combinedStream[option] = options[option]; - } + if (isChildProcess(stream)) stream.on('exit', onexit); - return combinedStream; -}; + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); -CombinedStream.isStreamLike = function(stream) { - return (typeof stream !== 'function') - && (typeof stream !== 'string') - && (typeof stream !== 'boolean') - && (typeof stream !== 'number') - && (!Buffer.isBuffer(stream)); + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + 
stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; }; -CombinedStream.prototype.append = function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); - - if (isStreamLike) { - if (!(stream instanceof DelayedStream)) { - var newStream = DelayedStream.create(stream, { - maxDataSize: Infinity, - pauseStream: this.pauseStreams, - }); - stream.on('data', this._checkDataSize.bind(this)); - stream = newStream; - } +module.exports = eos; - this._handleErrors(stream); - if (this.pauseStreams) { - stream.pause(); - } - } +/***/ }), - this._streams.push(stream); - return this; -}; +/***/ 84697: +/***/ ((module, exports) => { -CombinedStream.prototype.pipe = function(dest, options) { - Stream.prototype.pipe.call(this, dest, options); - this.resume(); - return dest; -}; +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ -CombinedStream.prototype._getNext = function() { - this._currentStream = null; - if (this._insideLoop) { - this._pendingNext = true; - return; // defer call - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - this._insideLoop = true; - try { - do { - this._pendingNext = false; - this._realGetNext(); - } while (this._pendingNext); - } finally { - this._insideLoop = false; - } -}; +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ -CombinedStream.prototype._realGetNext = function() { - var stream = this._streams.shift(); +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); - if (typeof stream == 'undefined') { - this.end(); - return; - } +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} - if (typeof stream !== 'function') { - this._pipeNext(stream); - return; - } +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. 
+ */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } - var getStream = stream; - getStream(function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - stream.on('data', this._checkDataSize.bind(this)); - this._handleErrors(stream); + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); } +} - this._pipeNext(stream); - }.bind(this)); -}; +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); -CombinedStream.prototype._pipeNext = function(stream) { - this._currentStream = stream; + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - stream.on('end', this._getNext.bind(this)); - stream.pipe(this, {end: false}); - return; - } + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} - var value = stream; - this.write(value); - this._getNext(); -}; +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, -CombinedStream.prototype._handleErrors = function(stream) { - var self = this; - stream.on('error', function(err) { - self._emitError(err); - }); -}; + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, -CombinedStream.prototype.write = function(data) { - this.emit('data', data); -}; + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, -CombinedStream.prototype.pause = function() { - if (!this.pauseStreams) { - return; - } + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); - this.emit('pause'); -}; + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, -CombinedStream.prototype.resume = function() { - if (!this._released) { - this._released = true; - this.writable = true; - this._getNext(); - } + /** + * Constant of BUBBLING_PHASE. 
+ * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); - this.emit('resume'); -}; + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, -CombinedStream.prototype.end = function() { - this._reset(); - this.emit('end'); -}; + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); -CombinedStream.prototype.destroy = function() { - this._reset(); - this.emit('close'); -}; + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, -CombinedStream.prototype._reset = function() { - this.writable = false; - this._streams = []; - this._currentStream = null; -}; + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); -CombinedStream.prototype._checkDataSize = function() { - this._updateDataSize(); - if (this.dataSize <= this.maxDataSize) { - return; - } + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; - this._emitError(new Error(message)); -}; + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, -CombinedStream.prototype._updateDataSize = function() { - this.dataSize = 0; + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, - var self = this; - this._streams.forEach(function(stream) { - if (!stream.dataSize) { - return; - } + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, - self.dataSize += stream.dataSize; - }); + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, - if (this._currentStream && this._currentStream.dataSize) { - this.dataSize += this._currentStream.dataSize; - } -}; + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, -CombinedStream.prototype._emitError = function(err) { - this._reset(); - this.emit('error', err); -}; + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, -/***/ }), + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); -/***/ 6891: -/***/ ((module) => { + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, -module.exports = function (xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); - } - return res; -}; + /** + * The flag to indicate cancellation state. 
+ * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, -var isArray = Array.isArray || function (xs) { - return Object.prototype.toString.call(xs) === '[object Array]'; + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, }; +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); -/***/ }), - -/***/ 8222: -/***/ ((module, exports, __nccwpck_require__) => { +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); -/* eslint-env browser */ + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} /** - * This is the web browser implementation of `debug()`. + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private */ - -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.storage = localstorage(); -exports.destroy = (() => { - let warned = false; - - return () => { - if (!warned) { - warned = true; - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } - }; -})(); +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} /** - * Colors. + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. 
+ * @private */ - -exports.colors = [ - '#0000CC', - '#0000FF', - '#0033CC', - '#0033FF', - '#0066CC', - '#0066FF', - '#0099CC', - '#0099FF', - '#00CC00', - '#00CC33', - '#00CC66', - '#00CC99', - '#00CCCC', - '#00CCFF', - '#3300CC', - '#3300FF', - '#3333CC', - '#3333FF', - '#3366CC', - '#3366FF', - '#3399CC', - '#3399FF', - '#33CC00', - '#33CC33', - '#33CC66', - '#33CC99', - '#33CCCC', - '#33CCFF', - '#6600CC', - '#6600FF', - '#6633CC', - '#6633FF', - '#66CC00', - '#66CC33', - '#9900CC', - '#9900FF', - '#9933CC', - '#9933FF', - '#99CC00', - '#99CC33', - '#CC0000', - '#CC0033', - '#CC0066', - '#CC0099', - '#CC00CC', - '#CC00FF', - '#CC3300', - '#CC3333', - '#CC3366', - '#CC3399', - '#CC33CC', - '#CC33FF', - '#CC6600', - '#CC6633', - '#CC9900', - '#CC9933', - '#CCCC00', - '#CCCC33', - '#FF0000', - '#FF0033', - '#FF0066', - '#FF0099', - '#FF00CC', - '#FF00FF', - '#FF3300', - '#FF3333', - '#FF3366', - '#FF3399', - '#FF33CC', - '#FF33FF', - '#FF6600', - '#FF6633', - '#FF9900', - '#FF9933', - '#FFCC00', - '#FFCC33' -]; +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} /** - * Currently only WebKit-based Web Inspectors, Firefox >= v31, - * and the Firebug extension (any Firefox version) are known - * to support "%c" CSS customizations. - * - * TODO: add a `localStorage` variable to explicitly enable/disable colors + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } -// eslint-disable-next-line complexity -function useColors() { - // NB: In an Electron preload script, document will be defined but not fully - // initialized. Since we know we're in Chrome, we'll just detect this case - // explicitly - if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { - return true; - } + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } - // Internet Explorer and Edge do not support colors. - if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); - // Is webkit? http://stackoverflow.com/a/16459606/376773 - // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 - return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || - // Is firebug? http://stackoverflow.com/a/398120/376773 - (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || - // Is firefox >= v31? 
- // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || - // Double check webkit in userAgent just in case we are in a worker - (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent } /** - * Colorize log arguments if enabled. - * - * @api public + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } -function formatArgs(args) { - args[0] = (this.useColors ? '%c' : '') + - this.namespace + - (this.useColors ? ' %c' : ' ') + - args[0] + - (this.useColors ? '%c ' : ' ') + - '+' + module.exports.humanize(this.diff); - - if (!this.useColors) { - return; - } - - const c = 'color: ' + this.color; - args.splice(1, 0, c, 'color: inherit'); + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} - // The final "%c" is somewhat tricky, because there could be other - // arguments passed either before or after the %c, so we need to - // figure out the correct index to insert the CSS into - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, match => { - if (match === '%%') { - return; - } - index++; - if (match === '%c') { - // We only are interested in the *last* %c - // (the user may have provided their own) - lastC = index; - } - }); +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} - args.splice(lastC, 0, c); +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped } /** - * Invokes `console.debug()` when available. - * No-op when `console.debug` is not a "function". - * If `console.debug` is not available, falls back - * to `console.log`. - * - * @api public + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private */ -exports.log = console.debug || console.log || (() => {}); +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} /** - * Save `namespaces`. - * - * @param {String} namespaces - * @api private + * Set the current target of a given event. 
+ * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private */ -function save(namespaces) { - try { - if (namespaces) { - exports.storage.setItem('debug', namespaces); - } else { - exports.storage.removeItem('debug'); - } - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; } /** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private */ -function load() { - let r; - try { - r = exports.storage.getItem('debug'); - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } - - // If debug isn't set in LS, and we're in Electron, try to load $DEBUG - if (!r && typeof process !== 'undefined' && 'env' in process) { - r = process.env.DEBUG; - } - - return r; +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; } /** - * Localstorage attempts to return the localstorage. - * - * This is necessary because safari throws - * when a user disables cookies/localstorage - * and you attempt to access it. - * - * @return {LocalStorage} - * @api private + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private */ -function localstorage() { - try { - // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context - // The Browser also has localStorage in the global context. - return localStorage; - } catch (error) { - // Swallow - // XXX (@Qix-) should we be logging these? - } -} - -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; - /** - * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + * @type {WeakMap>} + * @private */ +const listenersMap = new WeakMap(); -formatters.j = function (v) { - try { - return JSON.stringify(v); - } catch (error) { - return '[UnexpectedJSONParseError]: ' + error.message; - } -}; - - -/***/ }), - -/***/ 6243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; /** - * This is the common logic for both the Node.js and web browser - * implementations of `debug()`. + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} -function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = __nccwpck_require__(900); - createDebug.destroy = destroy; - - Object.keys(env).forEach(key => { - createDebug[key] = env[key]; - }); - - /** - * The currently active debug mode names, and names to skip. 
- */ - - createDebug.names = []; - createDebug.skips = []; - - /** - * Map of special "%n" handling functions, for the debug "format" argument. - * - * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". - */ - createDebug.formatters = {}; - - /** - * Selects a color for a debug namespace - * @param {String} namespace The namespace string for the debug instance to be colored - * @return {Number|String} An ANSI color code for the given namespace - * @api private - */ - function selectColor(namespace) { - let hash = 0; - - for (let i = 0; i < namespace.length; i++) { - hash = ((hash << 5) - hash) + namespace.charCodeAt(i); - hash |= 0; // Convert to 32bit integer - } - - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; - - /** - * Create a debugger with the given `namespace`. - * - * @param {String} namespace - * @return {Function} - * @api public - */ - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - - function debug(...args) { - // Disabled? - if (!debug.enabled) { - return; - } - - const self = debug; - - // Set `diff` timestamp - const curr = Number(new Date()); - const ms = curr - (prevTime || curr); - self.diff = ms; - self.prev = prevTime; - self.curr = curr; - prevTime = curr; - - args[0] = createDebug.coerce(args[0]); - - if (typeof args[0] !== 'string') { - // Anything else let's inspect with %O - args.unshift('%O'); - } - - // Apply any `formatters` transformations - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - // If we encounter an escaped % then don't increase the array index - if (match === '%%') { - return '%'; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === 'function') { - const val = args[index]; - match = formatter.call(self, val); - - // Now we need to remove `args[index]` since it's inlined in the `format` - args.splice(index, 1); - index--; - } - return match; - }); - - // Apply env-specific formatting (colors, etc.) - createDebug.formatArgs.call(self, args); - - const logFn = self.log || createDebug.log; - logFn.apply(self, args); - } - - debug.namespace = namespace; - debug.useColors = createDebug.useColors(); - debug.color = createDebug.selectColor(namespace); - debug.extend = extend; - debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. - - Object.defineProperty(debug, 'enabled', { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); - } - - return enabledCache; - }, - set: v => { - enableOverride = v; - } - }); - - // Env-specific initialization logic for debug instances - if (typeof createDebug.init === 'function') { - createDebug.init(debug); - } - - return debug; - } - - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } - - /** - * Enables a debug mode by namespaces. This can include modes - * separated by a colon and wildcards. 
- * - * @param {String} namespaces - * @api public - */ - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - - createDebug.names = []; - createDebug.skips = []; - - let i; - const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); - const len = split.length; - - for (i = 0; i < len; i++) { - if (!split[i]) { - // ignore empty strings - continue; - } +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} - namespaces = split[i].replace(/\*/g, '.*?'); +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, - if (namespaces[0] === '-') { - createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); - } else { - createDebug.names.push(new RegExp('^' + namespaces + '$')); - } - } - } + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); - /** - * Disable debug output. - * - * @return {String} namespaces - * @api public - */ - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) - ].join(','); - createDebug.enable(''); - return namespaces; - } + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } - /** - * Returns true if the given mode name is enabled, false otherwise. - * - * @param {String} name - * @return {Boolean} - * @api public - */ - function enabled(name) { - if (name[name.length - 1] === '*') { - return true; - } + node = node.next; + } - let i; - let len; + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} - for (i = 0, len = createDebug.skips.length; i < len; i++) { - if (createDebug.skips[i].test(name)) { - return false; - } - } +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. 
+ * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} - for (i = 0, len = createDebug.names.length; i < len; i++) { - if (createDebug.names[i].test(name)) { - return true; - } - } +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } - return false; - } + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); - /** - * Convert regexp to namespace - * - * @param {RegExp} regxep - * @return {String} namespace - * @api private - */ - function toNamespace(regexp) { - return regexp.toString() - .substring(2, regexp.toString().length - 2) - .replace(/\.\*\?$/, '*'); - } + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } - /** - * Coerce `val`. - * - * @param {Mixed} val - * @return {Mixed} - * @api private - */ - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } + return CustomEventTarget +} - /** - * XXX DO NOT USE. This is a temporary stub function. - * XXX It WILL be removed in the next major release. - */ - function destroy() { - console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); - } +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} - createDebug.enable(createDebug.load()); +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } - return createDebug; -} + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? 
Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; -module.exports = setup; + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } -/***/ }), + // Add it. + prev.next = newNode; + }, -/***/ 8237: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } -/** - * Detect Electron renderer / nwjs process, which is node, but we should - * treat as a browser. - */ + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; -if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { - module.exports = __nccwpck_require__(8222); -} else { - module.exports = __nccwpck_require__(4874); -} + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + prev = node; + node = node.next; + } + }, -/***/ }), + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } -/***/ 4874: -/***/ ((module, exports, __nccwpck_require__) => { + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } -/** - * Module dependencies. - */ + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); -const tty = __nccwpck_require__(6224); -const util = __nccwpck_require__(3837); + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } -/** - * This is the Node.js implementation of `debug()`. - */ + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } -exports.init = init; -exports.log = log; -exports.formatArgs = formatArgs; -exports.save = save; -exports.load = load; -exports.useColors = useColors; -exports.destroy = util.deprecate( - () => {}, - 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' -); + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } -/** - * Colors. - */ + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); -exports.colors = [6, 2, 3, 4, 5, 1]; + return !wrappedEvent.defaultPrevented + }, +}; -try { - // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) - // eslint-disable-next-line import/no-extraneous-dependencies - const supportsColor = __nccwpck_require__(9318); +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } -} catch (error) { - // Swallow - we only care if `supports-color` is available; it doesn't have to be. +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); } -/** - * Build up the default `inspectOpts` object from the environment variables. 
- * - * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js - */ +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; -exports.inspectOpts = Object.keys(process.env).filter(key => { - return /^debug_/i.test(key); -}).reduce((obj, key) => { - // Camel-case - const prop = key - .substring(6) - .toLowerCase() - .replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map - // Coerce string value into JS value - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === 'null') { - val = null; - } else { - val = Number(val); - } - obj[prop] = val; - return obj; -}, {}); +/***/ }), -/** - * Is stdout a TTY? Colored output is enabled when `true`. - */ +/***/ 38171: +/***/ ((module) => { -function useColors() { - return 'colors' in exports.inspectOpts ? - Boolean(exports.inspectOpts.colors) : - tty.isatty(process.stderr.fd); -} +"use strict"; -/** - * Adds ANSI color escape codes if enabled. - * - * @api public - */ -function formatArgs(args) { - const {namespace: name, useColors} = this; +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; - if (useColors) { - const c = this.color; - const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); - const prefix = ` ${colorCode};1m${name} \u001B[0m`; +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } - args[0] = prefix + args[0].split('\n').join('\n' + prefix); - args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); - } else { - args[0] = getDate() + name + ' ' + args[0]; + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; } -} -function getDate() { - if (exports.inspectOpts.hideDate) { - return ''; + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; } - return new Date().toISOString() + ' '; -} -/** - * Invokes `util.format()` with the specified arguments and writes to stderr. - */ + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. + var key; + for (key in obj) { /**/ } -function log(...args) { - return process.stderr.write(util.format(...args) + '\n'); -} + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; -/** - * Save `namespaces`. 
- * - * @param {String} namespaces - * @api private - */ -function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); } else { - // If you set a process.env field to null or undefined, it gets cast to the - // string 'null' or 'undefined'. Just delete instead. - delete process.env.DEBUG; + target[options.name] = options.newValue; } -} - -/** - * Load `namespaces`. - * - * @return {String} returns the previously persisted debug modes - * @api private - */ +}; -function load() { - return process.env.DEBUG; -} +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } -/** - * Init logic for `debug` instances. - * - * Create a new `inspectOpts` object in case `useColors` is set - * differently for a particular `debug` instance. - */ + return obj[name]; +}; -function init(debug) { - debug.inspectOpts = {}; +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; - const keys = Object.keys(exports.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; } -} - -module.exports = __nccwpck_require__(6243)(exports); - -const {formatters} = module.exports; -/** - * Map %o to `util.inspect()`, all on a single line. - */ + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); -formatters.o = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts) - .split('\n') - .map(str => str.trim()) - .join(' '); -}; + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? src : {}; + } -/** - * Map %O to `util.inspect()`, allowing multiple lines if needed. 
- */ + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); -formatters.O = function (v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; }; /***/ }), -/***/ 8611: +/***/ 12603: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Stream = (__nccwpck_require__(2781).Stream); -var util = __nccwpck_require__(3837); +"use strict"; -module.exports = DelayedStream; -function DelayedStream() { - this.source = null; - this.dataSize = 0; - this.maxDataSize = 1024 * 1024; - this.pauseStream = true; - this._maxDataSizeExceeded = false; - this._released = false; - this._bufferedEvents = []; +const validator = __nccwpck_require__(61739); +const XMLParser = __nccwpck_require__(42380); +const XMLBuilder = __nccwpck_require__(80660); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder } -util.inherits(DelayedStream, Stream); -DelayedStream.create = function(source, options) { - var delayedStream = new this(); +/***/ }), - options = options || {}; - for (var option in options) { - delayedStream[option] = options[option]; - } +/***/ 4958: +/***/ ((module) => { - delayedStream.source = source; +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} - var realEmit = source.emit; - source.emit = function() { - delayedStream._handleEmit(arguments); - return realEmit.apply(source, arguments); - }; +module.exports = getIgnoreAttributesFn - source.on('error', function() {}); - if (delayedStream.pauseStream) { - source.pause(); - } +/***/ }), - return delayedStream; -}; +/***/ 38280: +/***/ ((__unused_webpack_module, exports) => { -Object.defineProperty(DelayedStream.prototype, 'readable', { - configurable: true, - enumerable: true, - get: function() { - return this.source.readable; - } -}); +"use strict"; -DelayedStream.prototype.setEncoding = function() { - return this.source.setEncoding.apply(this.source, arguments); -}; -DelayedStream.prototype.resume = function() { - if (!this._released) { - this.release(); - } +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); - this.source.resume(); +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; }; -DelayedStream.prototype.pause = 
function() { - this.source.pause(); +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); }; -DelayedStream.prototype.release = function() { - this._released = true; - - this._bufferedEvents.forEach(function(args) { - this.emit.apply(this, args); - }.bind(this)); - this._bufferedEvents = []; +exports.isExist = function(v) { + return typeof v !== 'undefined'; }; -DelayedStream.prototype.pipe = function() { - var r = Stream.prototype.pipe.apply(this, arguments); - this.resume(); - return r; +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; }; -DelayedStream.prototype._handleEmit = function(args) { - if (this._released) { - this.emit.apply(this, args); - return; - } - - if (args[0] === 'data') { - this.dataSize += args[1].length; - this._checkIfMaxDataSizeExceeded(); +/** + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } } - - this._bufferedEvents.push(args); }; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ -DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { - if (this._maxDataSizeExceeded) { - return; +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; } +}; - if (this.dataSize <= this.maxDataSize) { - return; - } +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; - this._maxDataSizeExceeded = true; - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' - this.emit('error', new Error(message)); -}; +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; /***/ }), -/***/ 6599: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var stream = __nccwpck_require__(1642) -var eos = __nccwpck_require__(1205) -var inherits = __nccwpck_require__(4124) -var shift = __nccwpck_require__(6121) +/***/ 61739: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) - ? Buffer.from([0]) - : new Buffer([0]) +"use strict"; -var onuncork = function(self, fn) { - if (self._corked) self.once('uncork', fn) - else fn() -} -var autoDestroy = function (self, err) { - if (self._autoDestroy) self.destroy(err) -} +const util = __nccwpck_require__(38280); -var destroyer = function(self, end) { - return function(err) { - if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) - else if (end && !self._ended) self.end() - } -} +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; -var end = function(ws, fn) { - if (!ws) return fn() - if (ws._writableState && ws._writableState.finished) return fn() - if (ws._writableState) return ws.end(fn) - ws.end() - fn() -} +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); -var noop = function() {} + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; -var toStreams2 = function(rs) { - return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) -} + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; -var Duplexify = function(writable, readable, opts) { - if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) - stream.Duplex.call(this, opts) + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { - this._writable = null - this._readable = null - this._readable2 = null + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); - this._autoDestroy = !opts || opts.autoDestroy !== false - this._forwardDestroy = !opts || opts.destroy !== false - this._forwardEnd = !opts || opts.end !== false - this._corked = 1 // start corked - this._ondrain = null - this._drained = false - this._forwarding = false - this._unwrite = null - this._unread = null - this._ended = false + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } - this.destroyed = false + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; - if (writable) this.setWritable(writable) - if (readable) this.setReadable(readable) -} + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if 
(isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } -inherits(Duplexify, stream.Duplex) + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } -Duplexify.obj = function(writable, readable, opts) { - if (!opts) opts = {} - opts.objectMode = true - opts.highWaterMark = 16 - return new Duplexify(writable, readable, opts) -} + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } -Duplexify.prototype.cork = function() { - if (++this._corked === 1) this.emit('cork') -} + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } -Duplexify.prototype.uncork = function() { - if (this._corked && --this._corked === 0) this.emit('uncork') -} + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } -Duplexify.prototype.setWritable = function(writable) { - if (this._unwrite) this._unwrite() + return true; +}; - if (this.destroyed) { - if (writable && writable.destroy) writable.destroy() - return +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } } + return i; +} - if (writable === null || writable === false) { - this.end() - return +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } } - var self = this - var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + return i; +} - var ondrain = function() { - var ondrain = self._ondrain - self._ondrain = null - if (ondrain) ondrain() - } +const doubleQuote = '"'; +const singleQuote = "'"; - var clear = function() { - self._writable.removeListener('drain', ondrain) - unend() +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; } - if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks - - this._writable = writable - this._writable.on('drain', ondrain) - this._unwrite = clear - - this.uncork() // always uncork setWritable + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; } -Duplexify.prototype.setReadable = function(readable) { - if (this._unread) this._unread() - - if (this.destroyed) { - if (readable && readable.destroy) readable.destroy() - return - } +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); - if (readable === null || readable === false) { - this.push(null) - this.resume() - return - } +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" - var self = this - var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); - var onreadable = function() { - self._forward() - } + //if(attrStr.trim().length === 0) return true; //empty string - var onend = function() { - self.push(null) - } + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; - var clear = function() { - self._readable2.removeListener('readable', onreadable) - self._readable2.removeListener('end', onend) - unend() + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } } - this._drained = true - this._readable = readable - this._readable2 = readable._readableState ? 
readable : toStreams2(readable) - this._readable2.on('readable', onreadable) - this._readable2.on('end', onend) - this._unread = clear - - this._forward() + return true; } -Duplexify.prototype._read = function() { - this._drained = true - this._forward() +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; } -Duplexify.prototype._forward = function() { - if (this._forwarding || !this._readable2 || !this._drained) return - this._forwarding = true - - var data - - while (this._drained && (data = shift(this._readable2)) !== null) { - if (this.destroyed) continue - this._drained = this.push(data) +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); } - - this._forwarding = false + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; } -Duplexify.prototype.destroy = function(err, cb) { - if (!cb) cb = noop - if (this.destroyed) return cb(null) - this.destroyed = true - - var self = this - process.nextTick(function() { - self._destroy(err) - cb(null) - }) +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; } -Duplexify.prototype._destroy = function(err) { - if (err) { - var ondrain = this._ondrain - this._ondrain = null - if (ondrain) ondrain(err) - else this.emit('error', err) - } - - if (this._forwardDestroy) { - if (this._readable && this._readable.destroy) this._readable.destroy() - if (this._writable && this._writable.destroy) this._writable.destroy() - } - - this.emit('close') +function validateAttrName(attrName) { + return util.isName(attrName); } -Duplexify.prototype._write = function(data, enc, cb) { - if (this.destroyed) return - if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) - if (data === SIGNAL_FLUSH) return this._finish(cb) - if (!this._writable) return cb() +// const startsWithXML = /^xml/i; - if (this._writable.write(data) === false) this._ondrain = cb - else if (!this.destroyed) cb() +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; } -Duplexify.prototype._finish = function(cb) { - var self = this - this.emit('preend') - onuncork(this, function() { - end(self._forwardEnd && self._writable, function() { - // haxx to not emit prefinish twice - if (self._writableState.prefinished === false) self._writableState.prefinished = true - self.emit('prefinish') - onuncork(self, cb) - }) - }) -} +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, -Duplexify.prototype.end = function(data, enc, cb) { - if (typeof data === 'function') return this.end(null, null, data) - if (typeof enc === 'function') return this.end(data, null, enc) - this._ended = true - if (data) this.write(data) - if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) - return stream.Writable.prototype.end.call(this, cb) + // column 
number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; } -module.exports = Duplexify +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} /***/ }), -/***/ 1728: +/***/ 80660: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +//parse Empty Node as self closing node +const buildFromOrderedJs = __nccwpck_require__(72462); +const getIgnoreAttributesFn = __nccwpck_require__(4958) -var Buffer = (__nccwpck_require__(1867).Buffer); - -var getParamBytesForAlg = __nccwpck_require__(528); - -var MAX_OCTET = 0x80, - CLASS_UNIVERSAL = 0, - PRIMITIVE_BIT = 0x20, - TAG_SEQ = 0x10, - TAG_INT = 0x02, - ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), - ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); - -function base64Url(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} - -function signatureAsBuffer(signature) { - if (Buffer.isBuffer(signature)) { - return signature; - } else if ('string' === typeof signature) { - return Buffer.from(signature, 'base64'); - } - - throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); -} - -function derToJose(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - // the DER encoded param should at most be the param size, plus a padding - // zero, since due to being a signed integer - var maxEncodedParamLength = paramBytes + 1; - - var inputLength = signature.length; - - var offset = 0; - if (signature[offset++] !== ENCODED_TAG_SEQ) { - throw new Error('Could not find expected "seq"'); - } - - var seqLength = signature[offset++]; - if (seqLength === (MAX_OCTET | 1)) { - seqLength = signature[offset++]; - } - - if (inputLength - offset < seqLength) { - throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); - } - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "r"'); - } - - var rLength = signature[offset++]; - - if (inputLength - offset - 2 < rLength) { - throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); - } - - if (maxEncodedParamLength < rLength) { - throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var rOffset = offset; - offset += rLength; - - if (signature[offset++] !== ENCODED_TAG_INT) { - throw new Error('Could not find expected "int" for "s"'); - } - - var sLength = signature[offset++]; - - if (inputLength - offset !== sLength) { - throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); - } - - if (maxEncodedParamLength < sLength) { - throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); - } - - var sOffset = offset; - offset += sLength; - - if (offset !== inputLength) { - throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); - } - - var rPadding = paramBytes - rLength, - sPadding = paramBytes - sLength; - - var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); - - for (offset = 0; offset < rPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, rOffset + 
Math.max(-rPadding, 0), rOffset + rLength); - - offset = paramBytes; - - for (var o = offset; offset < o + sPadding; ++offset) { - dst[offset] = 0; - } - signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); - - dst = dst.toString('base64'); - dst = base64Url(dst); - - return dst; -} +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; -function countPadding(buf, start, stop) { - var padding = 0; - while (start + padding < stop && buf[start + padding] === 0) { - ++padding; - } +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } - var needsSign = buf[start + padding] >= MAX_OCTET; - if (needsSign) { - --padding; - } + this.processTextOrObjNode = processTextOrObjNode - return padding; + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } } -function joseToDer(signature, alg) { - signature = signatureAsBuffer(signature); - var paramBytes = getParamBytesForAlg(alg); - - var signatureBytes = signature.length; - if (signatureBytes !== paramBytes * 2) { - throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); - } - - var rPadding = countPadding(signature, 0, paramBytes); - var sPadding = countPadding(signature, paramBytes, signature.length); - var rLength = paramBytes - rPadding; - var sLength = paramBytes - sPadding; - - var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; - - var shortLength = rsBytes < MAX_OCTET; - - var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0, []).val; + } +}; - var offset = 0; - dst[offset++] = ENCODED_TAG_SEQ; - if (shortLength) { - // Bit 8 has value "0" - // bits 7-1 give the length. - dst[offset++] = rsBytes; - } else { - // Bit 8 of first octet has value "1" - // bits 7-1 give the number of additional length octets. 
- dst[offset++] = MAX_OCTET | 1; - // length, base 256 - dst[offset++] = rsBytes & 0xff; - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = rLength; - if (rPadding < 0) { - dst[offset++] = 0; - offset += signature.copy(dst, offset, 0, paramBytes); - } else { - offset += signature.copy(dst, offset, rPadding, paramBytes); - } - dst[offset++] = ENCODED_TAG_INT; - dst[offset++] = sLength; - if (sPadding < 0) { - dst[offset++] = 0; - signature.copy(dst, offset, paramBytes); - } else { - signature.copy(dst, offset, paramBytes + sPadding); - } +Builder.prototype.j2x = function(jObj, level, ajPath) { + let attrStr = ''; + let val = ''; + const jPath = ajPath.join('.') + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar;
+          else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+          // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+        } else if (typeof item === 'object') {
+          if(this.options.oneListGroup){
+            const result = this.j2x(item, level + 1, ajPath.concat(key));
+            listTagVal += result.val;
+            if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) {
+              listTagAttr += result.attrStr
+            }
+          }else{
+            listTagVal += this.processTextOrObjNode(item, key, level, ajPath)
+          }
+        } else {
+          if (this.options.oneListGroup) {
+            let textValue = this.options.tagValueProcessor(key, item);
+            textValue = this.replaceEntitiesValue(textValue);
+            listTagVal += textValue;
+          } else {
+            listTagVal += this.buildTextValNode(item, key, '', level);
+          }
+        }
+      }
+      if(this.options.oneListGroup){
+        listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level);
+      }
+      val += listTagVal;
+    } else {
+      //nested node
+      if (this.options.attributesGroupName && key === this.options.attributesGroupName) {
+        const Ks = Object.keys(jObj[key]);
+        const L = Ks.length;
+        for (let j = 0; j < L; j++) {
+          attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);
+        }
+      } else {
+        val += this.processTextOrObjNode(jObj[key], key, level, ajPath)
+      }
+    }
+  }
+  return {attrStr: attrStr, val: val};
+};

- return dst;
+Builder.prototype.buildAttrPairStr = function(attrName, val){
+  val = this.options.attributeValueProcessor(attrName, '' + val);
+  val = this.replaceEntitiesValue(val);
+  if (this.options.suppressBooleanAttributes && val === "true") {
+    return ' ' + attrName;
+  } else return ' ' + attrName + '="' + val + '"';
 }

-module.exports = {
- derToJose: derToJose,
- joseToDer: joseToDer
-};
-
+function processTextOrObjNode (object, key, level, ajPath) {
+  const result = this.j2x(object, level + 1, ajPath.concat(key));
+  if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {
+    return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);
+  } else {
+    return this.buildObjectNode(result.val, key, result.attrStr, level);
+  }
+}

-/***/ }),
+Builder.prototype.buildObjectNode = function(val, key, attrStr, level) {
+  if(val === ""){
+    if(key[0] === "?") return  this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }
+  }else{

-/***/ 528:
-/***/ ((module) => {

+    let tagEndExp = '</' + key + this.tagEndChar;
+    let piClosingChar = "";
+
+    if(key[0] === "?") {
+      piClosingChar = "?";
+      tagEndExp = "";
+    }
+
+    // attrStr is an empty string in case the attribute came as undefined or null
+    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+      return ( this.indentate(level) + '<' +  key + attrStr + piClosingChar + '>' + val + tagEndExp );
+    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+      return this.indentate(level) + `<!--${val}-->` + this.newLine;
+    }else {
+      return (
+        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+        val +
+        this.indentate(level) + tagEndExp    );
+    }
+  }
+}

-"use strict";
+Builder.prototype.closeTag = function(key){
+  let closeTag = "";
+  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+    if(!this.options.suppressUnpairedNode) closeTag = "/"
+  }else if(this.options.suppressEmptyNode){ //empty
+    closeTag = "/";
+  }else{
+    closeTag = `></${key}`;
+  }
+  return closeTag;
+}
+
+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+    return  this.indentate(level) + `<![CDATA[${val}]]>` +  this.newLine;
+  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+    return  this.indentate(level) + `<!--${val}-->` +  this.newLine;
+  }else if(key[0] === "?") {//PI tag
+    return  this.indentate(level) + '<' + key + attrStr+ '?'
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { - -var once = __nccwpck_require__(1223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), +/***/ 72462: +/***/ ((module) => { -/***/ 4697: -/***/ ((module, exports) => { +const EOL = "\n"; -"use strict"; /** - * @author Toru Nagashima - * @copyright 2015 Toru Nagashima. All rights reserved. - * See LICENSE file in root directory for full license. 
+ *
+ * @param {array} jArray
+ * @param {any} options
+ * @returns
 */
+function toXml(jArray, options) {
+  let indentation = "";
+  if (options.format && options.indentBy.length > 0) {
+    indentation = EOL;
+  }
+  return arrToStr(jArray, options, "", indentation);
+}

-Object.defineProperty(exports, "__esModule", ({ value: true }));
+function arrToStr(arr, options, jPath, indentation) {
+  let xmlStr = "";
+  let isPreviousElementTag = false;

-/**
- * @typedef {object} PrivateData
- * @property {EventTarget} eventTarget The event target.
- * @property {{type:string}} event The original event object.
- * @property {number} eventPhase The current event phase.
- * @property {EventTarget|null} currentTarget The current event target.
- * @property {boolean} canceled The flag to prevent default.
- * @property {boolean} stopped The flag to stop propagation.
- * @property {boolean} immediateStopped The flag to stop propagation immediately.
- * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.
- * @property {number} timeStamp The unix time.
- * @private
- */
+  for (let i = 0; i < arr.length; i++) {
+    const tagObj = arr[i];
+    const tagName = propName(tagObj);
+    if(tagName === undefined) continue;

-/**
- * Private data for event wrappers.
- * @type {WeakMap}
- * @private
- */
-const privateData = new WeakMap();
+    let newJPath = "";
+    if (jPath.length === 0) newJPath = tagName
+    else newJPath = `${jPath}.${tagName}`;

+    if (tagName === options.textNodeName) {
+      let tagText = tagObj[tagName];
+      if (!isStopNode(newJPath, options)) {
+        tagText = options.tagValueProcessor(tagName, tagText);
+        tagText = replaceEntitiesValue(tagText, options);
+      }
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += tagText;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.cdataPropName) {
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.commentPropName) {
+      xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+      isPreviousElementTag = true;
+      continue;
+    } else if (tagName[0] === "?") {
+      const attStr = attr_to_str(tagObj[":@"], options);
+      const tempInd = tagName === "?xml" ? "" : indentation;
+      let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+      piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+      xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+      isPreviousElementTag = true;
+      continue;
+    }
+    let newIdentation = indentation;
+    if (newIdentation !== "") {
+      newIdentation += options.indentBy;
+    }
+    const attStr = attr_to_str(tagObj[":@"], options);
+    const tagStart = indentation + `<${tagName}${attStr}`;
+    const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+    if (options.unpairedTags.indexOf(tagName) !== -1) {
+      if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+      else xmlStr += tagStart + "/>";
+    } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+      xmlStr += tagStart + "/>";
+    } else if (tagValue && tagValue.endsWith(">")) {
+      xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+    } else {
+      xmlStr += tagStart + ">";
+      if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+        xmlStr += indentation + options.indentBy + tagValue + indentation;
+      } else {
+        xmlStr += tagValue;
+      }
+      xmlStr += `</${tagName}>`;
+    }
+    isPreviousElementTag = true;
+  }

-/**
- * Cache for wrapper classes.
- * @type {WeakMap}
- * @private
- */
-const wrappers = new WeakMap();
+  return xmlStr;
+}

-/**
- * Get private data.
- * @param {Event} event The event object to get private data. - * @returns {PrivateData} The private data of the event. - * @private - */ -function pd(event) { - const retv = privateData.get(event); - console.assert( - retv != null, - "'this' is expected an Event object, but got", - event - ); - return retv +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } } -/** - * https://dom.spec.whatwg.org/#set-the-canceled-flag - * @param data {PrivateData} private data. - */ -function setCancelFlag(data) { - if (data.passiveListener != null) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error( - "Unable to preventDefault inside passive event listener invocation.", - data.passiveListener - ); +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } } - return } - if (!data.event.cancelable) { - return + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; } + return false; +} - data.canceled = true; - if (typeof data.event.preventDefault === "function") { - data.event.preventDefault(); +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } } + return textValue; } +module.exports = toXml; -/** - * @see https://dom.spec.whatwg.org/#interface-event - * @private - */ -/** - * The event wrapper. - * @constructor - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Event|{type:string}} event The original event to wrap. 
- */ -function Event(eventTarget, event) { - privateData.set(this, { - eventTarget, - event, - eventPhase: 2, - currentTarget: eventTarget, - canceled: false, - stopped: false, - immediateStopped: false, - passiveListener: null, - timeStamp: event.timeStamp || Date.now(), - }); - // https://heycam.github.io/webidl/#Unforgeable - Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); +/***/ }), - // Define accessors - const keys = Object.keys(event); - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in this)) { - Object.defineProperty(this, key, defineRedirectDescriptor(key)); +/***/ 6072: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(38280); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); } + return {entities, i}; } -// Should be enumerable, but class methods are not enumerable. -Event.prototype = { - /** - * The type of this event. - * @type {string} - */ - get type() { - return pd(this).event.type - }, - - /** - * The target of this event. - * @type {EventTarget} - */ - get target() { - return pd(this).eventTarget - }, +function readEntityExp(xmlData,i){ + //External entities are not supported + // - /** - * The target of this event. - * @type {EventTarget} - */ - get currentTarget() { - return pd(this).currentTarget - }, + //Parameter entities are not supported + // - /** - * @returns {EventTarget[]} The composed path of this event. - */ - composedPath() { - const currentTarget = pd(this).currentTarget; - if (currentTarget == null) { - return [] - } - return [currentTarget] - }, + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); - /** - * Constant of NONE. - * @type {number} - */ - get NONE() { - return 0 - }, + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} - /** - * Constant of CAPTURING_PHASE. - * @type {number} - */ - get CAPTURING_PHASE() { - return 1 - }, +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} - /** - * Constant of AT_TARGET. - * @type {number} - */ - get AT_TARGET() { - return 2 - }, +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} - /** - * Constant of BUBBLING_PHASE. - * @type {number} - */ - get BUBBLING_PHASE() { - return 3 - }, +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} - /** - * The target of this event. - * @type {number} - */ - get eventPhase() { - return pd(this).eventPhase - }, +module.exports = readDocType; - /** - * Stop event bubbling. - * @returns {void} - */ - stopPropagation() { - const data = pd(this); - data.stopped = true; - if (typeof data.event.stopPropagation === "function") { - data.event.stopPropagation(); - } - }, +/***/ }), - /** - * Stop event bubbling. - * @returns {void} - */ - stopImmediatePropagation() { - const data = pd(this); +/***/ 86993: +/***/ ((__unused_webpack_module, exports) => { - data.stopped = true; - data.immediateStopped = true; - if (typeof data.event.stopImmediatePropagation === "function") { - data.event.stopImmediatePropagation(); - } - }, - /** - * The flag to be bubbling. - * @type {boolean} - */ - get bubbles() { - return Boolean(pd(this).event.bubbles) +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true }, - - /** - * The flag to be cancelable. - * @type {boolean} - */ - get cancelable() { - return Boolean(pd(this).event.cancelable) + tagValueProcessor: function(tagName, val) { + return val; }, - - /** - * Cancel this event. - * @returns {void} - */ - preventDefault() { - setCancelFlag(pd(this)); + attributeValueProcessor: function(attrName, val) { + return val; }, - - /** - * The flag to indicate cancellation state. 
- * @type {boolean} - */ - get defaultPrevented() { - return pd(this).canceled + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; - /** - * The flag to be composed. - * @type {boolean} - */ - get composed() { - return Boolean(pd(this).event.composed) - }, +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; - /** - * The unix time of this event. - * @type {number} - */ - get timeStamp() { - return pd(this).timeStamp - }, +/***/ }), - /** - * The target of this event. - * @type {EventTarget} - * @deprecated - */ - get srcElement() { - return pd(this).eventTarget - }, +/***/ 25832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * The flag to stop event bubbling. - * @type {boolean} - * @deprecated - */ - get cancelBubble() { - return pd(this).stopped - }, - set cancelBubble(value) { - if (!value) { - return - } - const data = pd(this); +"use strict"; - data.stopped = true; - if (typeof data.event.cancelBubble === "boolean") { - data.event.cancelBubble = true; - } - }, +///@ts-check - /** - * The flag to indicate cancellation state. - * @type {boolean} - * @deprecated - */ - get returnValue() { - return !pd(this).canceled - }, - set returnValue(value) { - if (!value) { - setCancelFlag(pd(this)); - } - }, +const util = __nccwpck_require__(38280); +const xmlNode = __nccwpck_require__(7462); +const readDocType = __nccwpck_require__(6072); +const toNumber = __nccwpck_require__(14526); +const getIgnoreAttributesFn = __nccwpck_require__(4958) - /** - * Initialize this event object. But do nothing under event dispatching. - * @param {string} type The event type. - * @param {boolean} [bubbles=false] The flag to be possible to bubble up. - * @param {boolean} [cancelable=false] The flag to be possible to cancel. - * @deprecated - */ - initEvent() { - // Do nothing. - }, -}; +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); -// `constructor` is not enumerable. -Object.defineProperty(Event.prototype, "constructor", { - value: Event, - configurable: true, - writable: true, -}); +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); -// Ensure `event instanceof window.Event` is `true`. 
-if (typeof window !== "undefined" && typeof window.Event !== "undefined") { - Object.setPrototypeOf(Event.prototype, window.Event.prototype); +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + } - // Make association for wrappers. - wrappers.set(window.Event.prototype, Event); } -/** - * Get the property descriptor to redirect a given property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to redirect the property. - * @private - */ -function defineRedirectDescriptor(key) { - return { - get() { - return pd(this).event[key] - }, - set(value) { - pd(this).event[key] = value; - }, - configurable: true, - enumerable: true, +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] } + } } /** - * Get the property descriptor to call a given method property. - * @param {string} key Property name to define property descriptor. - * @returns {PropertyDescriptor} The property descriptor to call the method property. 
- * @private + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities */ -function defineCallDescriptor(key) { - return { - value() { - const event = pd(this).event; - return event[key].apply(event, arguments) - }, - configurable: true, - enumerable: true, +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } } + } } -/** - * Define new wrapper class. - * @param {Function} BaseEvent The base wrapper class. - * @param {Object} proto The prototype of the original event. - * @returns {Function} The defined wrapper class. - * @private - */ -function defineWrapper(BaseEvent, proto) { - const keys = Object.keys(proto); - if (keys.length === 0) { - return BaseEvent +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if 
(!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); - /** CustomEvent */ - function CustomEvent(eventTarget, event) { - BaseEvent.call(this, eventTarget, event); - } + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } - CustomEvent.prototype = Object.create(BaseEvent.prototype, { - constructor: { value: CustomEvent, configurable: true, writable: true }, - }); + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } - // Define accessors. - for (let i = 0; i < keys.length; ++i) { - const key = keys[i]; - if (!(key in BaseEvent.prototype)) { - const descriptor = Object.getOwnPropertyDescriptor(proto, key); - const isFunc = typeof descriptor.value === "function"; - Object.defineProperty( - CustomEvent.prototype, - key, - isFunc - ? defineCallDescriptor(key) - : defineRedirectDescriptor(key) - ); + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); } - } - return CustomEvent -} + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); -/** - * Get the wrapper class of a given prototype. - * @param {Object} proto The prototype of the original event to get its wrapper. - * @returns {Function} The wrapper class. - * @private - */ -function getWrapper(proto) { - if (proto == null || proto === Object.prototype) { - return Event - } + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { - let wrapper = wrappers.get(proto); - if (wrapper == null) { - wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); - wrappers.set(proto, wrapper); - } - return wrapper -} + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); -/** - * Wrap a given event to management a dispatching. - * @param {EventTarget} eventTarget The event target of this dispatching. - * @param {Object} event The event to wrap. - * @returns {Event} The wrapper instance. 
- * @private - */ -function wrapEvent(eventTarget, event) { - const Wrapper = getWrapper(Object.getPrototypeOf(event)); - return new Wrapper(eventTarget, event) -} + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ -/** - * Get the immediateStopped flag of a given event. - * @param {Event} event The event to get. - * @returns {boolean} The flag to stop propagation immediately. - * @private - */ -function isStopped(event) { - return pd(event).immediateStopped -} + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) -/** - * Set the current event phase of a given event. - * @param {Event} event The event to set current target. - * @param {number} eventPhase New event phase. - * @returns {void} - * @private - */ -function setEventPhase(event, eventPhase) { - pd(event).eventPhase = eventPhase; -} + } -/** - * Set the current target of a given event. - * @param {Event} event The event to set current target. - * @param {EventTarget|null} currentTarget New current target. - * @returns {void} - * @private - */ -function setCurrentTarget(event, currentTarget) { - pd(event).currentTarget = currentTarget; -} -/** - * Set a passive listener of a given event. - * @param {Event} event The event to set current target. - * @param {Function|null} passiveListener New passive listener. - * @returns {void} - * @private - */ -function setPassiveListener(event, passiveListener) { - pd(event).passiveListener = passiveListener; -} + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); -/** - * @typedef {object} ListenerNode - * @property {Function} listener - * @property {1|2|3} listenerType - * @property {boolean} passive - * @property {boolean} once - * @property {ListenerNode|null} next - * @private - */ + textData = this.saveTextToParentTag(textData, currentNode, jPath); -/** - * @type {WeakMap>} - * @private - */ -const listenersMap = new WeakMap(); + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); -// Listener types -const CAPTURE = 1; -const BUBBLE = 2; -const ATTRIBUTE = 3; + textData = this.saveTextToParentTag(textData, currentNode, jPath); -/** - * Check whether a given value is an object or not. - * @param {any} x The value to check. - * @returns {boolean} `true` if the value is an object. - */ -function isObject(x) { - return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax -} + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; -/** - * Get listeners. - * @param {EventTarget} eventTarget The event target to get. 
- * @returns {Map} The listeners. - * @private - */ -function getListeners(eventTarget) { - const listeners = listenersMap.get(eventTarget); - if (listeners == null) { - throw new TypeError( - "'this' is expected an EventTarget object, but got another value." - ) - } - return listeners -} + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; -/** - * Get the property descriptor for the event attribute of a given event. - * @param {string} eventName The event name to get property descriptor. - * @returns {PropertyDescriptor} The property descriptor. - * @private - */ -function defineEventAttributeDescriptor(eventName) { - return { - get() { - const listeners = getListeners(this); - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - return node.listener - } - node = node.next; - } - return null - }, + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } - set(listener) { - if (typeof listener !== "function" && !isObject(listener)) { - listener = null; // eslint-disable-line no-param-reassign + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); } - const listeners = getListeners(this); - - // Traverse to the tail while removing old value. - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if (node.listenerType === ATTRIBUTE) { - // Remove old value. 
- if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } - node = node.next; + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); } - // Add new value. - if (listener !== null) { - const newNode = { - listener, - listenerType: ATTRIBUTE, - passive: false, - once: false, - next: null, - }; - if (prev === null) { - listeners.set(eventName, newNode); - } else { - prev.next = newNode; - } + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); } - }, - configurable: true, - enumerable: true, + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as 
textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; } +//TODO: use jPath to simplify the logic /** - * Define an event attribute (e.g. `eventTarget.onclick`). - * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. - * @param {string} eventName The event name to define. - * @returns {void} + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName */ -function defineEventAttribute(eventTargetPrototype, eventName) { - Object.defineProperty( - eventTargetPrototype, - `on${eventName}`, - defineEventAttributeDescriptor(eventName) - ); +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; } /** - * Define a custom EventTarget with event attributes. - * @param {string[]} eventNames Event names for event attributes. - * @returns {EventTarget} The custom EventTarget. - * @private + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns */ -function defineCustomEventTarget(eventNames) { - /** CustomEventTarget */ - function CustomEventTarget() { - EventTarget.call(this); +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " } + tagExp += ch; + } +} - CustomEventTarget.prototype = Object.create(EventTarget.prototype, { - constructor: { - value: CustomEventTarget, - configurable: true, - writable: true, - }, - }); +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} - for (let i = 0; i < eventNames.length; ++i) { - defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent 
= tagName !== result.data.substr(colonIndex + 1); } + } - return CustomEventTarget + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } } - /** - * EventTarget. - * - * - This is constructor if no arguments. - * - This is a function which returns a CustomEventTarget constructor if there are arguments. - * - * For example: - * - * class A extends EventTarget {} - * class B extends EventTarget("message") {} - * class C extends EventTarget("message", "error") {} - * class D extends EventTarget(["message", "error"]) {} + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i */ -function EventTarget() { - /*eslint-disable consistent-return */ - if (this instanceof EventTarget) { - listenersMap.set(this, new Map()); - return - } - if (arguments.length === 1 && Array.isArray(arguments[0])) { - return defineCustomEventTarget(arguments[0]) - } - if (arguments.length > 0) { - const types = new Array(arguments.length); - for (let i = 0; i < arguments.length; ++i) { - types[i] = arguments[i]; +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } } - return defineCustomEventTarget(types) + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; } - throw new TypeError("Cannot call a class as a function") - /*eslint-enable consistent-return */ + } } -// Should be enumerable, but class methods are not enumerable. -EventTarget.prototype = { - /** - * Add a given listener to this event target. - * @param {string} eventName The event name to add. - * @param {Function} listener The listener to add. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. 
- * @returns {void} - */ - addEventListener(eventName, listener, options) { - if (listener == null) { - return - } - if (typeof listener !== "function" && !isObject(listener)) { - throw new TypeError("'listener' should be a function or an object.") - } - const listeners = getListeners(this); - const optionsIsObj = isObject(options); - const capture = optionsIsObj - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - const newNode = { - listener, - listenerType, - passive: optionsIsObj && Boolean(options.passive), - once: optionsIsObj && Boolean(options.once), - next: null, - }; +module.exports = OrderedObjParser; - // Set it as the first node if the first node is null. - let node = listeners.get(eventName); - if (node === undefined) { - listeners.set(eventName, newNode); - return - } - // Traverse to the tail while checking duplication.. - let prev = null; - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - // Should ignore duplication. - return - } - prev = node; - node = node.next; - } +/***/ }), - // Add it. - prev.next = newNode; - }, +/***/ 42380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(86993); +const OrderedObjParser = __nccwpck_require__(25832); +const { prettify} = __nccwpck_require__(42882); +const validator = __nccwpck_require__(61739); +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } /** - * Remove a given listener from this event target. - * @param {string} eventName The event name to remove. - * @param {Function} listener The listener to remove. - * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. - * @returns {void} + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption */ - removeEventListener(eventName, listener, options) { - if (listener == null) { - return + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") } - - const listeners = getListeners(this); - const capture = isObject(options) - ? Boolean(options.capture) - : Boolean(options); - const listenerType = capture ? CAPTURE : BUBBLE; - - let prev = null; - let node = listeners.get(eventName); - while (node != null) { - if ( - node.listener === listener && - node.listenerType === listenerType - ) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - return + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) } - - prev = node; - node = node.next; - } - }, + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } /** - * Dispatch a given event. 
- * @param {Event|{type:string}} event The event to dispatch. - * @returns {boolean} `false` if canceled. + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value */ - dispatchEvent(event) { - if (event == null || typeof event.type !== "string") { - throw new TypeError('"event.type" should be a string.') - } - - // If listeners aren't registered, terminate. - const listeners = getListeners(this); - const eventName = event.type; - let node = listeners.get(eventName); - if (node == null) { - return true + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; } + } +} - // Since we cannot rewrite several properties, so wrap object. - const wrappedEvent = wrapEvent(this, event); - - // This doesn't process capturing phase and bubbling phase. - // This isn't participating in a tree. - let prev = null; - while (node != null) { - // Remove this listener if it's once - if (node.once) { - if (prev !== null) { - prev.next = node.next; - } else if (node.next !== null) { - listeners.set(eventName, node.next); - } else { - listeners.delete(eventName); - } - } else { - prev = node; - } - - // Call this listener - setPassiveListener( - wrappedEvent, - node.passive ? node.listener : null - ); - if (typeof node.listener === "function") { - try { - node.listener.call(this, wrappedEvent); - } catch (err) { - if ( - typeof console !== "undefined" && - typeof console.error === "function" - ) { - console.error(err); - } - } - } else if ( - node.listenerType !== ATTRIBUTE && - typeof node.listener.handleEvent === "function" - ) { - node.listener.handleEvent(wrappedEvent); - } +module.exports = XMLParser; - // Break if `event.stopImmediatePropagation` was called. - if (isStopped(wrappedEvent)) { - break - } +/***/ }), - node = node.next; - } - setPassiveListener(wrappedEvent, null); - setEventPhase(wrappedEvent, 0); - setCurrentTarget(wrappedEvent, null); +/***/ 42882: +/***/ ((__unused_webpack_module, exports) => { - return !wrappedEvent.defaultPrevented - }, -}; +"use strict"; -// `constructor` is not enumerable. -Object.defineProperty(EventTarget.prototype, "constructor", { - value: EventTarget, - configurable: true, - writable: true, -}); -// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
-if ( - typeof window !== "undefined" && - typeof window.EventTarget !== "undefined" -) { - Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); } -exports.defineEventAttribute = defineEventAttribute; -exports.EventTarget = EventTarget; -exports["default"] = EventTarget; - -module.exports = EventTarget -module.exports.EventTarget = module.exports["default"] = EventTarget -module.exports.defineEventAttribute = defineEventAttribute -//# sourceMappingURL=event-target-shim.js.map - +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; -/***/ }), + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); -/***/ 8171: -/***/ ((module) => { + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } -"use strict"; + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} -var hasOwn = Object.prototype.hasOwnProperty; -var toStr = Object.prototype.toString; -var defineProperty = Object.defineProperty; -var gOPD = Object.getOwnPropertyDescriptor; +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} -var isArray = function isArray(arr) { - if (typeof Array.isArray === 'function') { - return Array.isArray(arr); - } +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } - return toStr.call(arr) === '[object Array]'; -}; + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } -var isPlainObject = function isPlainObject(obj) { - if (!obj || toStr.call(obj) !== '[object Object]') { - return false; - } + return false; +} +exports.prettify = prettify; - var hasOwnConstructor = hasOwn.call(obj, 'constructor'); - var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); - // Not own constructor property must be Object - if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { - return false; - } - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. - var key; - for (key in obj) { /**/ } +/***/ }), - return typeof key === 'undefined' || hasOwn.call(obj, key); -}; +/***/ 7462: +/***/ ((module) => { -// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target -var setProperty = function setProperty(target, options) { - if (defineProperty && options.name === '__proto__') { - defineProperty(target, options.name, { - enumerable: true, - configurable: true, - value: options.newValue, - writable: true - }); - } else { - target[options.name] = options.newValue; - } -}; +"use strict"; -// Return undefined instead of __proto__ if '__proto__' is not an own property -var getProperty = function getProperty(obj, name) { - if (name === '__proto__') { - if (!hasOwn.call(obj, name)) { - return void 0; - } else if (gOPD) { - // In early versions of node, obj['__proto__'] is buggy when obj has - // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. 
- return gOPD(obj, name).value; - } - } - return obj[name]; +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; }; -module.exports = function extend() { - var options, name, src, copy, copyIsArray, clone; - var target = arguments[0]; - var i = 1; - var length = arguments.length; - var deep = false; - - // Handle a deep copy situation - if (typeof target === 'boolean') { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { - target = {}; - } - for (; i < length; ++i) { - options = arguments[i]; - // Only deal with non-null/undefined values - if (options != null) { - // Extend the base object - for (name in options) { - src = getProperty(target, name); - copy = getProperty(options, name); +module.exports = XmlNode; - // Prevent never-ending loop - if (target !== copy) { - // Recurse if we're merging plain objects or arrays - if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { - if (copyIsArray) { - copyIsArray = false; - clone = src && isArray(src) ? src : []; - } else { - clone = src && isPlainObject(src) ? src : {}; - } +/***/ }), - // Never move original objects, clone them - setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); +/***/ 31133: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Don't bring in undefined values - } else if (typeof copy !== 'undefined') { - setProperty(target, { name: name, newValue: copy }); - } - } - } - } - } +var debug; - // Return the modified object - return target; +module.exports = function () { + if (!debug) { + try { + /* eslint global-require: off */ + debug = __nccwpck_require__(38237)("follow-redirects"); + } + catch (error) { /* */ } + if (typeof debug !== "function") { + debug = function () { /* */ }; + } + } + debug.apply(null, arguments); }; /***/ }), -/***/ 2603: +/***/ 67707: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - - -const validator = __nccwpck_require__(1739); -const XMLParser = __nccwpck_require__(2380); -const XMLBuilder = __nccwpck_require__(660); +var url = __nccwpck_require__(57310); +var URL = url.URL; +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var Writable = (__nccwpck_require__(12781).Writable); +var assert = __nccwpck_require__(39491); +var debug = __nccwpck_require__(31133); -module.exports = { - XMLParser: XMLParser, - XMLValidator: validator, - XMLBuilder: XMLBuilder +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL()); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; } -/***/ }), +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; -/***/ 8280: -/***/ 
((__unused_webpack_module, exports) => { +// Create handlers that pass events from native requests +var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; +var eventHandlers = Object.create(null); +events.forEach(function (event) { + eventHandlers[event] = function (arg1, arg2, arg3) { + this._redirectable.emit(event, arg1, arg2, arg3); + }; +}); -"use strict"; +// Error types with codes +var InvalidUrlError = createErrorType( + "ERR_INVALID_URL", + "Invalid URL", + TypeError +); +var RedirectionError = createErrorType( + "ERR_FR_REDIRECTION_FAILURE", + "Redirected request failed" +); +var TooManyRedirectsError = createErrorType( + "ERR_FR_TOO_MANY_REDIRECTS", + "Maximum number of redirects exceeded", + RedirectionError +); +var MaxBodyLengthExceededError = createErrorType( + "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", + "Request body larger than maxBodyLength limit" +); +var WriteAfterEndError = createErrorType( + "ERR_STREAM_WRITE_AFTER_END", + "write after end" +); +// istanbul ignore next +var destroy = Writable.prototype.destroy || noop; -const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; -const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; -const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' -const regexName = new RegExp('^' + nameRegexp + '$'); +// An HTTP(S) request that can be redirected +function RedirectableRequest(options, responseCallback) { + // Initialize the request + Writable.call(this); + this._sanitizeOptions(options); + this._options = options; + this._ended = false; + this._ending = false; + this._redirectCount = 0; + this._redirects = []; + this._requestBodyLength = 0; + this._requestBodyBuffers = []; -const getAllMatches = function(string, regex) { - const matches = []; - let match = regex.exec(string); - while (match) { - const allmatches = []; - allmatches.startIndex = regex.lastIndex - match[0].length; - const len = match.length; - for (let index = 0; index < len; index++) { - allmatches.push(match[index]); - } - matches.push(allmatches); - match = regex.exec(string); + // Attach a callback if passed + if (responseCallback) { + this.on("response", responseCallback); } - return matches; -}; -const isName = function(string) { - const match = regexName.exec(string); - return !(match === null || typeof match === 'undefined'); -}; + // React to responses of native requests + var self = this; + this._onNativeResponse = function (response) { + try { + self._processResponse(response); + } + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? + cause : new RedirectionError({ cause: cause })); + } + }; -exports.isExist = function(v) { - return typeof v !== 'undefined'; + // Perform the first request + this._performRequest(); +} +RedirectableRequest.prototype = Object.create(Writable.prototype); + +RedirectableRequest.prototype.abort = function () { + destroyRequest(this._currentRequest); + this._currentRequest.abort(); + this.emit("abort"); }; -exports.isEmptyObject = function(obj) { - return Object.keys(obj).length === 0; +RedirectableRequest.prototype.destroy = function (error) { + destroyRequest(this._currentRequest, error); + destroy.call(this, error); + return this; }; -/** - * Copy all the properties of a into b. 
- * @param {*} target - * @param {*} a - */ -exports.merge = function(target, a, arrayMode) { - if (a) { - const keys = Object.keys(a); // will return an array of own properties - const len = keys.length; //don't make it inline - for (let i = 0; i < len; i++) { - if (arrayMode === 'strict') { - target[keys[i]] = [ a[keys[i]] ]; - } else { - target[keys[i]] = a[keys[i]]; - } +// Writes buffered data to the current native request +RedirectableRequest.prototype.write = function (data, encoding, callback) { + // Writing is not allowed if end has been called + if (this._ending) { + throw new WriteAfterEndError(); + } + + // Validate input and shift parameters if necessary + if (!isString(data) && !isBuffer(data)) { + throw new TypeError("data should be a string, Buffer or Uint8Array"); + } + if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Ignore empty buffers, since writing them doesn't invoke the callback + // https://github.com/nodejs/node/issues/22066 + if (data.length === 0) { + if (callback) { + callback(); } + return; + } + // Only write when we don't exceed the maximum body length + if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { + this._requestBodyLength += data.length; + this._requestBodyBuffers.push({ data: data, encoding: encoding }); + this._currentRequest.write(data, encoding, callback); + } + // Error when we exceed the maximum body length + else { + this.emit("error", new MaxBodyLengthExceededError()); + this.abort(); } }; -/* exports.merge =function (b,a){ - return Object.assign(b,a); -} */ -exports.getValue = function(v) { - if (exports.isExist(v)) { - return v; - } else { - return ''; +// Ends the current native request +RedirectableRequest.prototype.end = function (data, encoding, callback) { + // Shift parameters if necessary + if (isFunction(data)) { + callback = data; + data = encoding = null; + } + else if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Write data if needed and end + if (!data) { + this._ended = this._ending = true; + this._currentRequest.end(null, null, callback); + } + else { + var self = this; + var currentRequest = this._currentRequest; + this.write(data, encoding, function () { + self._ended = true; + currentRequest.end(null, null, callback); + }); + this._ending = true; } }; -// const fakeCall = function(a) {return a;}; -// const fakeCallNoReturn = function() {}; +// Sets a header value on the current native request +RedirectableRequest.prototype.setHeader = function (name, value) { + this._options.headers[name] = value; + this._currentRequest.setHeader(name, value); +}; -exports.isName = isName; -exports.getAllMatches = getAllMatches; -exports.nameRegexp = nameRegexp; +// Clears a header value on the current native request +RedirectableRequest.prototype.removeHeader = function (name) { + delete this._options.headers[name]; + this._currentRequest.removeHeader(name); +}; +// Global timeout for all underlying requests +RedirectableRequest.prototype.setTimeout = function (msecs, callback) { + var self = this; -/***/ }), + // Destroys the socket on timeout + function destroyOnTimeout(socket) { + socket.setTimeout(msecs); + socket.removeListener("timeout", socket.destroy); + socket.addListener("timeout", socket.destroy); + } -/***/ 1739: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // Sets up a timer to trigger a timeout event + function startTimer(socket) { + if (self._timeout) { + clearTimeout(self._timeout); + } + self._timeout = 
setTimeout(function () { + self.emit("timeout"); + clearTimer(); + }, msecs); + destroyOnTimeout(socket); + } -"use strict"; + // Stops a timeout from triggering + function clearTimer() { + // Clear the timeout + if (self._timeout) { + clearTimeout(self._timeout); + self._timeout = null; + } + + // Clean up all attached listeners + self.removeListener("abort", clearTimer); + self.removeListener("error", clearTimer); + self.removeListener("response", clearTimer); + self.removeListener("close", clearTimer); + if (callback) { + self.removeListener("timeout", callback); + } + if (!self.socket) { + self._currentRequest.removeListener("socket", startTimer); + } + } + // Attach callback if passed + if (callback) { + this.on("timeout", callback); + } -const util = __nccwpck_require__(8280); + // Start the timer if or when the socket is opened + if (this.socket) { + startTimer(this.socket); + } + else { + this._currentRequest.once("socket", startTimer); + } -const defaultOptions = { - allowBooleanAttributes: false, //A tag can have attributes without any value - unpairedTags: [] -}; + // Clean up on events + this.on("socket", destroyOnTimeout); + this.on("abort", clearTimer); + this.on("error", clearTimer); + this.on("response", clearTimer); + this.on("close", clearTimer); -//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); -exports.validate = function (xmlData, options) { - options = Object.assign({}, defaultOptions, options); + return this; +}; - //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line - //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag - //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE - const tags = []; - let tagFound = false; +// Proxy all other public ClientRequest methods +[ + "flushHeaders", "getHeader", + "setNoDelay", "setSocketKeepAlive", +].forEach(function (method) { + RedirectableRequest.prototype[method] = function (a, b) { + return this._currentRequest[method](a, b); + }; +}); - //indicates that the root tag has been closed (aka. 
depth 0 has been reached) - let reachedRoot = false; +// Proxy all public ClientRequest properties +["aborted", "connection", "socket"].forEach(function (property) { + Object.defineProperty(RedirectableRequest.prototype, property, { + get: function () { return this._currentRequest[property]; }, + }); +}); - if (xmlData[0] === '\ufeff') { - // check for byte order mark (BOM) - xmlData = xmlData.substr(1); +RedirectableRequest.prototype._sanitizeOptions = function (options) { + // Ensure headers are always present + if (!options.headers) { + options.headers = {}; } - - for (let i = 0; i < xmlData.length; i++) { - if (xmlData[i] === '<' && xmlData[i+1] === '?') { - i+=2; - i = readPI(xmlData,i); - if (i.err) return i; - }else if (xmlData[i] === '<') { - //starting of tag - //read until you reach to '>' avoiding any '>' in attribute value - let tagStartPos = i; - i++; - - if (xmlData[i] === '!') { - i = readCommentAndCDATA(xmlData, i); - continue; - } else { - let closingTag = false; - if (xmlData[i] === '/') { - //closing tag - closingTag = true; - i++; - } - //read tagname - let tagName = ''; - for (; i < xmlData.length && - xmlData[i] !== '>' && - xmlData[i] !== ' ' && - xmlData[i] !== '\t' && - xmlData[i] !== '\n' && - xmlData[i] !== '\r'; i++ - ) { - tagName += xmlData[i]; - } - tagName = tagName.trim(); - //console.log(tagName); + // Since http.request treats host as an alias of hostname, + // but the url module interprets host as hostname plus port, + // eliminate the host property to avoid confusion. + if (options.host) { + // Use hostname if set, because it has precedence + if (!options.hostname) { + options.hostname = options.host; + } + delete options.host; + } - if (tagName[tagName.length - 1] === '/') { - //self closing tag without attributes - tagName = tagName.substring(0, tagName.length - 1); - //continue; - i--; - } - if (!validateTagName(tagName)) { - let msg; - if (tagName.trim().length === 0) { - msg = "Invalid space after '<'."; - } else { - msg = "Tag '"+tagName+"' is an invalid name."; - } - return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); - } + // Complete the URL object when necessary + if (!options.pathname && options.path) { + var searchPos = options.path.indexOf("?"); + if (searchPos < 0) { + options.pathname = options.path; + } + else { + options.pathname = options.path.substring(0, searchPos); + options.search = options.path.substring(searchPos); + } + } +}; - const result = readAttributeStr(xmlData, i); - if (result === false) { - return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); - } - let attrStr = result.value; - i = result.index; - if (attrStr[attrStr.length - 1] === '/') { - //self closing tag - const attrStrStart = i - attrStr.length; - attrStr = attrStr.substring(0, attrStr.length - 1); - const isValid = validateAttributeString(attrStr, options); - if (isValid === true) { - tagFound = true; - //continue; //text may presents after self closing tag - } else { - //the result from the nested function returns the position of the error within the attribute - //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute - //this gives us the absolute index in the entire xml, which we can use to find the line at last - return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); - } - } else 
if (closingTag) { - if (!result.tagClosed) { - return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); - } else if (attrStr.trim().length > 0) { - return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); - } else if (tags.length === 0) { - return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); - } else { - const otg = tags.pop(); - if (tagName !== otg.tagName) { - let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); - return getErrorObject('InvalidTag', - "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", - getLineNumberForPosition(xmlData, tagStartPos)); - } +// Executes the next native request (initial or redirect) +RedirectableRequest.prototype._performRequest = function () { + // Load the native protocol + var protocol = this._options.protocol; + var nativeProtocol = this._options.nativeProtocols[protocol]; + if (!nativeProtocol) { + throw new TypeError("Unsupported protocol " + protocol); + } - //when there are no more tags, we reached the root level. - if (tags.length == 0) { - reachedRoot = true; - } - } - } else { - const isValid = validateAttributeString(attrStr, options); - if (isValid !== true) { - //the result from the nested function returns the position of the error within the attribute - //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute - //this gives us the absolute index in the entire xml, which we can use to find the line at last - return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); - } + // If specified, use the agent corresponding to the protocol + // (HTTP and HTTPS use different types of agents) + if (this._options.agents) { + var scheme = protocol.slice(0, -1); + this._options.agent = this._options.agents[scheme]; + } - //if the root level has been reached before ... 
- if (reachedRoot === true) { - return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); - } else if(options.unpairedTags.indexOf(tagName) !== -1){ - //don't push into stack - } else { - tags.push({tagName, tagStartPos}); - } - tagFound = true; - } + // Create the native request and set up its event handlers + var request = this._currentRequest = + nativeProtocol.request(this._options, this._onNativeResponse); + request._redirectable = this; + for (var event of events) { + request.on(event, eventHandlers[event]); + } - //skip tag text value - //It may include comments and CDATA value - for (i++; i < xmlData.length; i++) { - if (xmlData[i] === '<') { - if (xmlData[i + 1] === '!') { - //comment or CADATA - i++; - i = readCommentAndCDATA(xmlData, i); - continue; - } else if (xmlData[i+1] === '?') { - i = readPI(xmlData, ++i); - if (i.err) return i; - } else{ - break; - } - } else if (xmlData[i] === '&') { - const afterAmp = validateAmpersand(xmlData, i); - if (afterAmp == -1) - return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); - i = afterAmp; - }else{ - if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { - return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); - } + // RFC7230§5.3.1: When making a request directly to an origin server, […] + // a client MUST send only the absolute path […] as the request-target. + this._currentUrl = /^\//.test(this._options.path) ? + url.format(this._options) : + // When making a request to a proxy, […] + // a client MUST send the target URI in absolute-form […]. + this._options.path; + + // End a redirected request + // (The first request must be ended explicitly with RedirectableRequest#end) + if (this._isRedirect) { + // Write the request entity and end + var i = 0; + var self = this; + var buffers = this._requestBodyBuffers; + (function writeNext(error) { + // Only write if this request has not been redirected yet + /* istanbul ignore else */ + if (request === self._currentRequest) { + // Report any write errors + /* istanbul ignore if */ + if (error) { + self.emit("error", error); + } + // Write the next buffer if there are still left + else if (i < buffers.length) { + var buffer = buffers[i++]; + /* istanbul ignore else */ + if (!request.finished) { + request.write(buffer.data, buffer.encoding, writeNext); } - } //end of reading tag text value - if (xmlData[i] === '<') { - i--; + } + // End the request if `end` has been called on us + else if (self._ended) { + request.end(); } } - } else { - if ( isWhiteSpace(xmlData[i])) { - continue; - } - return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); - } + }()); } +}; - if (!tagFound) { - return getErrorObject('InvalidXml', 'Start tag expected.', 1); - }else if (tags.length == 1) { - return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); - }else if (tags.length > 0) { - return getErrorObject('InvalidXml', "Invalid '"+ - JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ - "' found.", {line: 1, col: 1}); +// Processes a response from the current native request +RedirectableRequest.prototype._processResponse = function (response) { + // Store the redirected response + var statusCode = response.statusCode; + if (this._options.trackRedirects) { + this._redirects.push({ + url: this._currentUrl, + 
headers: response.headers, + statusCode: statusCode, + }); } - return true; -}; + // RFC7231§6.4: The 3xx (Redirection) class of status code indicates + // that further action needs to be taken by the user agent in order to + // fulfill the request. If a Location header field is provided, + // the user agent MAY automatically redirect its request to the URI + // referenced by the Location field value, + // even if the specific status code is not understood. -function isWhiteSpace(char){ - return char === ' ' || char === '\t' || char === '\n' || char === '\r'; -} -/** - * Read Processing insstructions and skip - * @param {*} xmlData - * @param {*} i - */ -function readPI(xmlData, i) { - const start = i; - for (; i < xmlData.length; i++) { - if (xmlData[i] == '?' || xmlData[i] == ' ') { - //tagname - const tagname = xmlData.substr(start, i - start); - if (i > 5 && tagname === 'xml') { - return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); - } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') { - //check if valid attribut string - i++; - break; - } else { - continue; - } - } - } - return i; -} + // If the response is not a redirect; return it as-is + var location = response.headers.location; + if (!location || this._options.followRedirects === false || + statusCode < 300 || statusCode >= 400) { + response.responseUrl = this._currentUrl; + response.redirects = this._redirects; + this.emit("response", response); -function readCommentAndCDATA(xmlData, i) { - if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { - //comment - for (i += 3; i < xmlData.length; i++) { - if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { - i += 2; - break; - } - } - } else if ( - xmlData.length > i + 8 && - xmlData[i + 1] === 'D' && - xmlData[i + 2] === 'O' && - xmlData[i + 3] === 'C' && - xmlData[i + 4] === 'T' && - xmlData[i + 5] === 'Y' && - xmlData[i + 6] === 'P' && - xmlData[i + 7] === 'E' - ) { - let angleBracketsCount = 1; - for (i += 8; i < xmlData.length; i++) { - if (xmlData[i] === '<') { - angleBracketsCount++; - } else if (xmlData[i] === '>') { - angleBracketsCount--; - if (angleBracketsCount === 0) { - break; - } - } - } - } else if ( - xmlData.length > i + 9 && - xmlData[i + 1] === '[' && - xmlData[i + 2] === 'C' && - xmlData[i + 3] === 'D' && - xmlData[i + 4] === 'A' && - xmlData[i + 5] === 'T' && - xmlData[i + 6] === 'A' && - xmlData[i + 7] === '[' - ) { - for (i += 8; i < xmlData.length; i++) { - if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { - i += 2; - break; - } - } + // Clean up + this._requestBodyBuffers = []; + return; } - return i; -} + // The response is a redirect, so abort the current request + destroyRequest(this._currentRequest); + // Discard the remainder of the response to avoid waiting for data + response.destroy(); -const doubleQuote = '"'; -const singleQuote = "'"; + // RFC7231§6.4: A client SHOULD detect and intervene + // in cyclical redirections (i.e., "infinite" redirection loops). + if (++this._redirectCount > this._options.maxRedirects) { + throw new TooManyRedirectsError(); + } -/** - * Keep reading xmlData until '<' is found outside the attribute value. 
- * @param {string} xmlData - * @param {number} i - */ -function readAttributeStr(xmlData, i) { - let attrStr = ''; - let startChar = ''; - let tagClosed = false; - for (; i < xmlData.length; i++) { - if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { - if (startChar === '') { - startChar = xmlData[i]; - } else if (startChar !== xmlData[i]) { - //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa - } else { - startChar = ''; - } - } else if (xmlData[i] === '>') { - if (startChar === '') { - tagClosed = true; - break; - } - } - attrStr += xmlData[i]; + // Store the request headers if applicable + var requestHeaders; + var beforeRedirect = this._options.beforeRedirect; + if (beforeRedirect) { + requestHeaders = Object.assign({ + // The Host header was set by nativeProtocol.request + Host: response.req.getHeader("host"), + }, this._options.headers); } - if (startChar !== '') { - return false; + + // RFC7231§6.4: Automatic redirection needs to done with + // care for methods not known to be safe, […] + // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change + // the request method from POST to GET for the subsequent request. + var method = this._options.method; + if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || + // RFC7231§6.4.4: The 303 (See Other) status code indicates that + // the server is redirecting the user agent to a different resource […] + // A user agent can perform a retrieval request targeting that URI + // (a GET or HEAD request if using HTTP) […] + (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { + this._options.method = "GET"; + // Drop a possible entity and headers related to it + this._requestBodyBuffers = []; + removeMatchingHeaders(/^content-/i, this._options.headers); } - return { - value: attrStr, - index: i, - tagClosed: tagClosed - }; -} + // Drop the Host header, as the redirect might lead to a different host + var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); -/** - * Select all the attributes whether valid or invalid. - */ -const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + // If the redirect is relative, carry over the host of the last request + var currentUrlParts = parseUrl(this._currentUrl); + var currentHost = currentHostHeader || currentUrlParts.host; + var currentUrl = /^\w+:/.test(location) ? 
this._currentUrl : + url.format(Object.assign(currentUrlParts, { host: currentHost })); -//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + // Create the redirected request + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); + this._isRedirect = true; + spreadUrlObject(redirectUrl, this._options); -function validateAttributeString(attrStr, options) { - //console.log("start:"+attrStr+":end"); + // Drop confidential headers when redirecting to a less secure protocol + // or to a different domain that is not a superdomain + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); + } - //if(attrStr.trim().length === 0) return true; //empty string + // Evaluate the beforeRedirect callback + if (isFunction(beforeRedirect)) { + var responseDetails = { + headers: response.headers, + statusCode: statusCode, + }; + var requestDetails = { + url: currentUrl, + method: method, + headers: requestHeaders, + }; + beforeRedirect(this._options, responseDetails, requestDetails); + this._sanitizeOptions(this._options); + } - const matches = util.getAllMatches(attrStr, validAttrStrRegxp); - const attrNames = {}; + // Perform the redirected request + this._performRequest(); +}; - for (let i = 0; i < matches.length; i++) { - if (matches[i][1].length === 0) { - //nospace before attribute name: a="sd"b="saf" - return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) - } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { - return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); - } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { - //independent attribute: ab - return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); +// Wraps the key/value object of protocols with redirect functionality +function wrap(protocols) { + // Default settings + var exports = { + maxRedirects: 21, + maxBodyLength: 10 * 1024 * 1024, + }; + + // Wrap each protocol + var nativeProtocols = {}; + Object.keys(protocols).forEach(function (scheme) { + var protocol = scheme + ":"; + var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; + var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); + + // Executes a request, following redirects + function request(input, options, callback) { + // Parse parameters, ensuring that input is an object + if (isURL(input)) { + input = spreadUrlObject(input); + } + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); + } + else { + callback = options; + options = validateUrl(input); + input = { protocol: protocol }; + } + if (isFunction(options)) { + callback = options; + options = null; + } + + // Set defaults + options = Object.assign({ + maxRedirects: exports.maxRedirects, + maxBodyLength: exports.maxBodyLength, + }, input, options); + options.nativeProtocols = nativeProtocols; + if (!isString(options.host) && !isString(options.hostname)) { + options.hostname = "::1"; + } + + assert.equal(options.protocol, protocol, "protocol mismatch"); + debug("options", options); + return new RedirectableRequest(options, callback); } - /* else if(matches[i][6] 
=== undefined){//attribute without value: ab= - return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; - } */ - const attrName = matches[i][2]; - if (!validateAttrName(attrName)) { - return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + + // Executes a GET request, following redirects + function get(input, options, callback) { + var wrappedRequest = wrappedProtocol.request(input, options, callback); + wrappedRequest.end(); + return wrappedRequest; } - if (!attrNames.hasOwnProperty(attrName)) { - //check for duplicate attribute. - attrNames[attrName] = 1; - } else { - return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + + // Expose the properties on the wrapped protocol + Object.defineProperties(wrappedProtocol, { + request: { value: request, configurable: true, enumerable: true, writable: true }, + get: { value: get, configurable: true, enumerable: true, writable: true }, + }); + }); + return exports; +} + +function noop() { /* empty */ } + +function parseUrl(input) { + var parsed; + /* istanbul ignore else */ + if (useNativeURL) { + parsed = new URL(input); + } + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); } } + return parsed; +} - return true; +function resolveUrl(relative, base) { + /* istanbul ignore next */ + return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); } -function validateNumberAmpersand(xmlData, i) { - let re = /\d/; - if (xmlData[i] === 'x') { - i++; - re = /[\da-fA-F]/; +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); } - for (; i < xmlData.length; i++) { - if (xmlData[i] === ';') - return i; - if (!xmlData[i].match(re)) - break; + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); } - return -1; + return input; } -function validateAmpersand(xmlData, i) { - // https://www.w3.org/TR/xml/#dt-charref - i++; - if (xmlData[i] === ';') - return -1; - if (xmlData[i] === '#') { - i++; - return validateNumberAmpersand(xmlData, i); +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; } - let count = 0; - for (; i < xmlData.length; i++, count++) { - if (xmlData[i].match(/\w/) && count < 20) - continue; - if (xmlData[i] === ';') - break; - return -1; + + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); } - return i; + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); + } + // Concatenate path + spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; + + return spread; } -function getErrorObject(code, message, lineNumber) { - return { - err: { - code: code, - msg: message, - line: lineNumber.line || lineNumber, - col: lineNumber.col, +function removeMatchingHeaders(regex, headers) { + var lastValue; + for (var header in headers) { + if (regex.test(header)) { + lastValue = headers[header]; + delete headers[header]; + } + } + return (lastValue === null || typeof lastValue === "undefined") ? 
+ undefined : String(lastValue).trim(); +} + +function createErrorType(code, message, baseClass) { + // Create constructor + function CustomError(properties) { + Error.captureStackTrace(this, this.constructor); + Object.assign(this, properties || {}); + this.code = code; + this.message = this.cause ? message + ": " + this.cause.message : message; + } + + // Attach constructor and set default properties + CustomError.prototype = new (baseClass || Error)(); + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, }, - }; + name: { + value: "Error [" + code + "]", + enumerable: false, + }, + }); + return CustomError; } -function validateAttrName(attrName) { - return util.isName(attrName); +function destroyRequest(request, error) { + for (var event of events) { + request.removeListener(event, eventHandlers[event]); + } + request.on("error", noop); + request.destroy(error); } -// const startsWithXML = /^xml/i; +function isSubdomain(subdomain, domain) { + assert(isString(subdomain) && isString(domain)); + var dot = subdomain.length - domain.length - 1; + return dot > 0 && subdomain[dot] === "." && subdomain.endsWith(domain); +} -function validateTagName(tagname) { - return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +function isString(value) { + return typeof value === "string" || value instanceof String; } -//this function returns the line number for the character at the given index -function getLineNumberForPosition(xmlData, index) { - const lines = xmlData.substring(0, index).split(/\r?\n/); - return { - line: lines.length, +function isFunction(value) { + return typeof value === "function"; +} - // column number is last line's length + 1, because column numbering starts at 1: - col: lines[lines.length - 1].length + 1 - }; +function isBuffer(value) { + return typeof value === "object" && ("length" in value); } -//this function returns the position of the first character of match within attrStr -function getPositionFromMatch(match) { - return match.startIndex + match[1].length; +function isURL(value) { + return URL && value instanceof URL; } +// Exports +module.exports = wrap({ http: http, https: https }); +module.exports.wrap = wrap; + /***/ }), -/***/ 660: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 66129: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -//parse Empty Node as self closing node -const buildFromOrderedJs = __nccwpck_require__(2462); - -const defaultOptions = { - attributeNamePrefix: '@_', - attributesGroupName: false, - textNodeName: '#text', - ignoreAttributes: true, - cdataPropName: false, - format: false, - indentBy: ' ', - suppressEmptyNode: false, - suppressUnpairedNode: true, - suppressBooleanAttributes: true, - tagValueProcessor: function(key, a) { - return a; - }, - attributeValueProcessor: function(attrName, a) { - return a; - }, - preserveOrder: false, - commentPropName: false, - unpairedTags: [], - entities: [ - { regex: new RegExp("&", "g"), val: "&" },//it must be on top - { regex: new RegExp(">", "g"), val: ">" }, - { regex: new RegExp("<", "g"), val: "<" }, - { regex: new RegExp("\'", "g"), val: "'" }, - { regex: new RegExp("\"", "g"), val: """ } - ], - processEntities: true, - stopNodes: [], - // transformTagName: false, - // transformAttributeName: false, - oneListGroup: false +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; }; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +const url_1 = __nccwpck_require__(57310); +const util_1 = __nccwpck_require__(21980); +const extend_1 = __importDefault(__nccwpck_require__(38171)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. 
+ } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +exports.defaultErrorRedactor = defaultErrorRedactor; +//# sourceMappingURL=common.js.map -function Builder(options) { - this.options = Object.assign({}, defaultOptions, options); - if (this.options.ignoreAttributes || this.options.attributesGroupName) { - this.isAttribute = function(/*a*/) { - return false; - }; - } else { - this.attrPrefixLen = this.options.attributeNamePrefix.length; - this.isAttribute = isAttribute; - } +/***/ }), - this.processTextOrObjNode = processTextOrObjNode +/***/ 28133: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (this.options.format) { - this.indentate = indentate; - this.tagEndChar = '>\n'; - this.newLine = '\n'; - } else { - this.indentate = function() { - return ''; - }; - this.tagEndChar = '>'; - this.newLine = ''; - } +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the 
Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(38171)); +const https_1 = __nccwpck_require__(95687); +const node_fetch_1 = __importDefault(__nccwpck_require__(80467)); +const querystring_1 = __importDefault(__nccwpck_require__(63477)); +const is_stream_1 = __importDefault(__nccwpck_require__(41554)); +const url_1 = __nccwpck_require__(57310); +const common_1 = __nccwpck_require__(66129); +const retry_1 = __nccwpck_require__(31052); +const stream_1 = __nccwpck_require__(12781); +const uuid_1 = __nccwpck_require__(75840); +const interceptor_1 = __nccwpck_require__(14309); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? 
void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. 
+ */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; + } + yield finale; + } } - -Builder.prototype.build = function(jObj) { - if(this.options.preserveOrder){ - return buildFromOrderedJs(jObj, this.options); - }else { - if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ - jObj = { - [this.options.arrayNodeName] : jObj - } +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); } - return this.j2x(jObj, 0).val; - } -}; - -Builder.prototype.j2x = function(jObj, level) { - let attrStr = ''; - let val = ''; - for (let key in jObj) { - if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; - if (typeof jObj[key] === 'undefined') { - // supress undefined node only if it is not an attribute - if (this.isAttribute(key)) { - val += ''; - } - } else if (jObj[key] === null) { - // null attribute should be ignored by the attribute list, but should not cause the tag closing - if (this.isAttribute(key)) { - val += ''; - } else if (key[0] === '?') { - val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; - } else { - val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - } - // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - } else if (jObj[key] instanceof Date) { - val += this.buildTextValNode(jObj[key], key, '', level); - } else if (typeof jObj[key] !== 'object') { - //premitive type - const attr = this.isAttribute(key); - if (attr) { - attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); - }else { - //tag value - if (key === this.options.textNodeName) { - let newval = this.options.tagValueProcessor(key, '' + jObj[key]); - val += this.replaceEntitiesValue(newval); - } else { - val += this.buildTextValNode(jObj[key], key, '', level); + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } } - } - } else if (Array.isArray(jObj[key])) { - //repeated nodes - const arrLen = jObj[key].length; - let listTagVal = ""; - for (let j = 0; j < arrLen; j++) { - const item = jObj[key][j]; - if (typeof item === 'undefined') { - // supress undefined node - } else if (item === null) { - if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; - else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; - } else if (typeof item === 'object') { - if(this.options.oneListGroup ){ - listTagVal += this.j2x(item, level + 1).val; - }else{ - listTagVal += this.processTextOrObjNode(item, key, level) - } - } else { - listTagVal += this.buildTextValNode(item, key, '', level); + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } } - } - if(this.options.oneListGroup){ - listTagVal = this.buildObjectNode(listTagVal, key, '', level); - } - val += listTagVal; - } else { - //nested node - if (this.options.attributesGroupName && key === this.options.attributesGroupName) { - const Ks = Object.keys(jObj[key]); - const L = Ks.length; - for (let j = 0; j < L; j++) { - attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; } - } else { - val += this.processTextOrObjNode(jObj[key], key, level) - } } - } - return {attrStr: attrStr, val: val}; -}; - -Builder.prototype.buildAttrPairStr = function(attrName, val){ - val = this.options.attributeValueProcessor(attrName, '' + val); - val = this.replaceEntitiesValue(val); - if (this.options.suppressBooleanAttributes && val === "true") { - return ' ' + attrName; - } else return ' ' + attrName + '="' + val + '"'; -} - -function processTextOrObjNode (object, key, level) { - const result = this.j2x(object, level + 1); - if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { - return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); - } else { - return this.buildObjectNode(result.val, key, result.attrStr, level); - } -} - -Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { - if(val === ""){ - if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; - else { - return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } } - }else{ - - let tagEndExp = '} Promise that resolves to the set of options or response after interceptors are applied. 
+ */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } } - - // attrStr is an empty string in case the attribute came as undefined or null - if ((attrStr || attrStr === '') && val.indexOf('<') === -1) { - return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp ); - } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { - return this.indentate(level) + `` + this.newLine; - }else { - return ( - this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + - val + - this.indentate(level) + tagEndExp ); + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. + * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); } - } -} - -Builder.prototype.closeTag = function(key){ - let closeTag = ""; - if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired - if(!this.options.suppressUnpairedNode) closeTag = "/" - }else if(this.options.suppressEmptyNode){ //empty - closeTag = "/"; - }else{ - closeTag = `>` + this.newLine; - }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { - return this.indentate(level) + `` + this.newLine; - }else if(key[0] === "?") {//PI tag - return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; - }else{ - let textValue = this.options.tagValueProcessor(key, val); - textValue = this.replaceEntitiesValue(textValue); - - if( textValue === ''){ - return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; - }else{ - return this.indentate(level) + '<' + key + attrStr + '>' + - textValue + - ' 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? 
'&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; } - } -} - -Builder.prototype.replaceEntitiesValue = function(textValue){ - if(textValue && textValue.length > 0 && this.options.processEntities){ - for (let i=0; i { - -const EOL = "\n"; - -/** - * - * @param {array} jArray - * @param {any} options - * @returns - */ -function toXml(jArray, options) { - let indentation = ""; - if (options.format && options.indentBy.length > 0) { - indentation = EOL; + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; } - return arrToStr(jArray, options, "", indentation); -} - -function arrToStr(arr, options, jPath, indentation) { - let xmlStr = ""; - let isPreviousElementTag = false; - - for (let i = 0; i < arr.length; i++) { - const tagObj = arr[i]; - const tagName = propName(tagObj); - if(tagName === undefined) continue; - - let newJPath = ""; - if (jPath.length === 0) newJPath = tagName - else newJPath = `${jPath}.${tagName}`; - - if (tagName === options.textNodeName) { - let tagText = tagObj[tagName]; - if (!isStopNode(newJPath, options)) { - tagText = options.tagValueProcessor(tagName, tagText); - tagText = replaceEntitiesValue(tagText, options); - } - if (isPreviousElementTag) { - xmlStr += indentation; - } - xmlStr += tagText; - isPreviousElementTag = false; - continue; - } else if (tagName === options.cdataPropName) { - if (isPreviousElementTag) { - xmlStr += indentation; - } - xmlStr += ``; - isPreviousElementTag = false; - continue; - } else if (tagName === options.commentPropName) { - xmlStr += indentation + ``; - isPreviousElementTag = true; - continue; - } else if (tagName[0] === "?") { - const attStr = attr_to_str(tagObj[":@"], options); - const tempInd = tagName === "?xml" ? "" : indentation; - let piTextNodeName = tagObj[tagName][0][options.textNodeName]; - piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing - xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; - isPreviousElementTag = true; - continue; + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; } - let newIdentation = indentation; - if (newIdentation !== "") { - newIdentation += options.indentBy; + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } } - const attStr = attr_to_str(tagObj[":@"], options); - const tagStart = indentation + `<${tagName}${attStr}`; - const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); - if (options.unpairedTags.indexOf(tagName) !== -1) { - if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; - else xmlStr += tagStart + "/>"; - } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { - xmlStr += tagStart + "/>"; - } else if (tagValue && tagValue.endsWith(">")) { - xmlStr += tagStart + `>${tagValue}${indentation}`; - } else { - xmlStr += tagStart + ">"; - if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; } - isPreviousElementTag = true; + else { + opts.body = opts.data; + } } - - return xmlStr; -} - -function propName(obj) { - const keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - if(!obj.hasOwnProperty(key)) continue; - if (key !== ":@") return key; + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); } -} - -function attr_to_str(attrMap, options) { - let attrStr = ""; - if (attrMap && !options.ignoreAttributes) { - for (let attr in attrMap) { - if(!attrMap.hasOwnProperty(attr)) continue; - let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); - attrVal = replaceEntitiesValue(attrVal, options); - if (attrVal === true && options.suppressBooleanAttributes) { - attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; - } else { - attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; - } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. 
+ } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); } } - return attrStr; -} + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(77219)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); +}; +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. + */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map + +/***/ }), + +/***/ 59555: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -function isStopNode(jPath, options) { - jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); - let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); - for (let index in options.stopNodes) { - if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; - } - return false; -} +"use strict"; -function replaceEntitiesValue(textValue, options) { - if (textValue && textValue.length > 0 && options.processEntities) { - for (let i = 0; i < options.entities.length; i++) { - const entity = options.entities[i]; - textValue = textValue.replace(entity.regex, entity.val); - } +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - return textValue; + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +const gaxios_1 = __nccwpck_require__(28133); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(66129); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(14309), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); } -module.exports = toXml; - +exports.request = request; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6072: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const util = __nccwpck_require__(8280); +/***/ 14309: +/***/ ((__unused_webpack_module, exports) => { -//TODO: handle comments -function readDocType(xmlData, i){ - - const entities = {}; - if( xmlData[i + 3] === 'O' && - xmlData[i + 4] === 'C' && - xmlData[i + 5] === 'T' && - xmlData[i + 6] === 'Y' && - xmlData[i + 7] === 'P' && - xmlData[i + 8] === 'E') - { - i = i+9; - let angleBracketsCount = 1; - let hasBody = false, comment = false; - let exp = ""; - for(;i') { //Read tag content - if(comment){ - if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ - comment = false; - angleBracketsCount--; - } - }else{ - angleBracketsCount--; - } - if (angleBracketsCount === 0) { - break; - } - }else if( xmlData[i] === '['){ - hasBody = true; - }else{ - exp += xmlData[i]; - } - } - if(angleBracketsCount !== 0){ - throw new Error(`Unclosed DOCTYPE`); - } - }else{ - throw new Error(`Invalid Tag instead of DOCTYPE`); - } - return {entities, i}; +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. 
+ */ +class GaxiosInterceptorManager extends Set { } +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map -function readEntityExp(xmlData,i){ - //External entities are not supported - // +/***/ }), - //Parameter entities are not supported - // +/***/ 31052: +/***/ ((__unused_webpack_module, exports) => { - //Internal entities are supported - // - - //read EntityName - let entityName = ""; - for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { - // if(xmlData[i] === " ") continue; - // else - entityName += xmlData[i]; - } - entityName = entityName.trim(); - if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); +"use strict"; - //read Entity Value - const startChar = xmlData[i++]; - let val = "" - for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { - val += xmlData[i]; +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = void 0; +async function getRetryConfig(err) { + var _a; + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; } - return [entityName, val, i]; -} - -function isComment(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === '-' && - xmlData[i+3] === '-') return true - return false + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? 
_a : 100; + // Formula: retryDelay + ((2^c - 1 / 2) * 1000) + const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; } -function isEntity(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'E' && - xmlData[i+3] === 'N' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'I' && - xmlData[i+6] === 'T' && - xmlData[i+7] === 'Y') return true - return false +exports.getRetryConfig = getRetryConfig; +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; } -function isElement(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'E' && - xmlData[i+3] === 'L' && - xmlData[i+4] === 'E' && - xmlData[i+5] === 'M' && - xmlData[i+6] === 'E' && - xmlData[i+7] === 'N' && - xmlData[i+8] === 'T') return true - return false +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; } +//# sourceMappingURL=retry.js.map -function isAttlist(xmlData, i){ - if(xmlData[i+1] === '!' && - xmlData[i+2] === 'A' && - xmlData[i+3] === 'T' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'L' && - xmlData[i+6] === 'I' && - xmlData[i+7] === 'S' && - xmlData[i+8] === 'T') return true - return false -} -function isNotation(xmlData, i){ - if(xmlData[i+1] === '!' 
&& - xmlData[i+2] === 'N' && - xmlData[i+3] === 'O' && - xmlData[i+4] === 'T' && - xmlData[i+5] === 'A' && - xmlData[i+6] === 'T' && - xmlData[i+7] === 'I' && - xmlData[i+8] === 'O' && - xmlData[i+9] === 'N') return true - return false -} +/***/ }), -function validateEntityName(name){ - if (util.isName(name)) - return name; - else - throw new Error(`Invalid entity name ${name}`); -} +/***/ 21980: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = readDocType; +"use strict"; +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6318); +//# sourceMappingURL=util.js.map /***/ }), -/***/ 2821: -/***/ ((__unused_webpack_module, exports) => { +/***/ 51904: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; -const defaultOptions = { - preserveOrder: false, - attributeNamePrefix: '@_', - attributesGroupName: false, - textNodeName: '#text', - ignoreAttributes: true, - removeNSPrefix: false, // remove NS from tag name or attribute name if true - allowBooleanAttributes: false, //a tag can have attributes without any value - //ignoreRootElement : false, - parseTagValue: true, - parseAttributeValue: false, - trimValues: true, //Trim string values of tag and attributes - cdataPropName: false, - numberParseOptions: { - hex: true, - leadingZeros: true, - eNotation: true - }, - tagValueProcessor: function(tagName, val) { - return val; - }, - attributeValueProcessor: function(attrName, val) { - return val; - }, - stopNodes: [], //nested tags will not be parsed even for errors - alwaysCreateTextNode: false, - isArray: () => false, - commentPropName: false, - unpairedTags: [], - processEntities: true, - htmlEntities: false, - ignoreDeclaration: false, - ignorePiTags: false, - transformTagName: false, - transformAttributeName: false, - updateTag: function(tagName, jPath, attrs){ - return tagName - }, - // skipEmptyListItem: false -}; - -const buildOptions = function(options) { - return Object.assign({}, defaultOptions, options); +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', }; - -exports.buildOptions = buildOptions; -exports.defaultOptions = defaultOptions; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +exports.isGoogleCloudServerless = isGoogleCloudServerless; +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. 
+ */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +exports.isGoogleComputeEngine = isGoogleComputeEngine; +/** + * Determines if the process is running on Google Cloud Platform. + * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +exports.detectGCPResidency = detectGCPResidency; +//# sourceMappingURL=gcp-residency.js.map /***/ }), -/***/ 5832: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 3563: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -///@ts-check - -const util = __nccwpck_require__(8280); -const xmlNode = __nccwpck_require__(7462); -const readDocType = __nccwpck_require__(6072); -const toNumber = __nccwpck_require__(4526); - -// const regx = -// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' -// .replace(/NAME/g, util.nameRegexp); - -//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); -//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); - -class OrderedObjParser{ - constructor(options){ - this.options = options; - this.currentNode = null; - this.tagsNodeStack = []; - this.docTypeEntities = {}; - this.lastEntities = { - "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, - "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, - "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, - "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, - }; - this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; - this.htmlEntities = { - "space": { regex: /&(nbsp|#160);/g, val: " " }, - // "lt" : { regex: /&(lt|#60);/g, val: "<" }, - // "gt" : { regex: /&(gt|#62);/g, val: ">" }, - // "amp" : { regex: /&(amp|#38);/g, val: "&" }, - // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, - // "apos" : { regex: /&(apos|#39);/g, val: "'" }, - "cent" : { regex: /&(cent|#162);/g, val: "¢" }, - "pound" : { regex: /&(pound|#163);/g, val: "£" }, - "yen" : { regex: /&(yen|#165);/g, val: "¥" }, - "euro" : { regex: /&(euro|#8364);/g, val: "€" }, - "copyright" : { regex: /&(copy|#169);/g, val: "©" }, - "reg" : { regex: /&(reg|#174);/g, val: "®" }, - "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, - "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, - "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, - }; - this.addExternalEntities = addExternalEntities; - this.parseXml = parseXml; - this.parseTextData = parseTextData; - this.resolveNameSpace = resolveNameSpace; - this.buildAttributesMap = buildAttributesMap; - this.isItStopNode = isItStopNode; - this.replaceEntitiesValue = replaceEntitiesValue; - this.readStopNodeData = readStopNodeData; - this.saveTextToParentTag = saveTextToParentTag; - this.addChild = addChild; - } - -} - -function addExternalEntities(externalEntities){ - const entKeys = Object.keys(externalEntities); - for (let i = 0; i < entKeys.length; i++) { - const ent = entKeys[i]; - this.lastEntities[ent] = { - regex: new RegExp("&"+ent+";","g"), - val : externalEntities[ent] +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. 
+ * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - } -} - + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const jsonBigint = __nccwpck_require__(55031); +const gcp_residency_1 = __nccwpck_require__(51904); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); /** - * @param {string} val - * @param {string} tagName - * @param {string} jPath - * @param {boolean} dontTrim - * @param {boolean} hasAttributes - * @param {boolean} isLeafNode - * @param {boolean} escapeEntities + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. */ -function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { - if (val !== undefined) { - if (this.options.trimValues && !dontTrim) { - val = val.trim(); +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 
+ */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; } - if(val.length > 0){ - if(!escapeEntities) val = this.replaceEntitiesValue(val); - - const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); - if(newval === null || newval === undefined){ - //don't parse - return val; - }else if(typeof newval !== typeof val || newval !== val){ - //overwrite - return newval; - }else if(this.options.trimValues){ - return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); - }else{ - const trimmedVal = val.trim(); - if(trimmedVal === val){ - return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); - }else{ - return val; - } - } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; } - } + return new URL(exports.BASE_PATH, baseUrl).href; } - -function resolveNameSpace(tagname) { - if (this.options.removeNSPrefix) { - const tags = tagname.split(':'); - const prefix = tagname.charAt(0) === '/' ? '/' : ''; - if (tags[0] === 'xmlns') { - return ''; +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; } - if (tags.length === 2) { - tagname = prefix + tags[1]; + else { + metadataKey = type; } - } - return tagname; -} - -//TODO: change regex to capture NS -//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); -const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); - -function buildAttributesMap(attrStr, jPath, tagName) { - if (!this.options.ignoreAttributes && typeof attrStr === 'string') { - // attrStr = attrStr.replace(/\r?\n/g, ' '); - //attrStr = attrStr || attrStr.trim(); - - const matches = util.getAllMatches(attrStr, attrsRegx); - const len = matches.length; //don't make it inline - const attrs = {}; - for (let i = 0; i < len; i++) { - const attrName = this.resolveNameSpace(matches[i][1]); - let oldVal = matches[i][4]; - let aName = this.options.attributeNamePrefix + attrName; - if (attrName.length) { - if (this.options.transformAttributeName) { - aName = this.options.transformAttributeName(aName); - } - if(aName === "__proto__") aName = "#__proto__"; - if (oldVal !== undefined) { - if (this.options.trimValues) { - oldVal = oldVal.trim(); - } - oldVal = 
this.replaceEntitiesValue(oldVal); - const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); - if(newVal === null || newVal === undefined){ - //don't parse - attrs[aName] = oldVal; - }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ - //overwrite - attrs[aName] = newVal; - }else{ - //parse - attrs[aName] = parseValue( - oldVal, - this.options.parseAttributeValue, - this.options.numberParseOptions - ); - } - } else if (this.options.allowBooleanAttributes) { - attrs[aName] = true; + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; } - } + headers = options.headers || headers; + params = options.params || params; } - if (!Object.keys(attrs).length) { - return; + try { + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const res = await requestMethod({ + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + } + else if (!res.data) { + throw new Error('Invalid response from the metadata service'); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; } - if (this.options.attributesGroupName) { - const attrCollection = {}; - attrCollection[this.options.attributesGroupName] = attrs; - return attrCollection; + catch (e) { + const err = e; + if (err.response && err.response.status !== 200) { + err.message = `Unsuccessful response status code. ${err.message}`; + } + throw e; } - return attrs - } } - -const parseXml = function(xmlData) { - xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line - const xmlObj = new xmlNode('!xml'); - let currentNode = xmlObj; - let textData = ""; - let jPath = ""; - for(let i=0; i< xmlData.length; i++){//for each char in XML data - const ch = xmlData[i]; - if(ch === '<'){ - // const nextIndex = i+1; - // const _2ndChar = xmlData[nextIndex]; - if( xmlData[i+1] === '/') {//Closing Tag - const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") - let tagName = xmlData.substring(i+2,closeIndex).trim(); - - if(this.options.removeNSPrefix){ - const colonIndex = tagName.indexOf(":"); - if(colonIndex !== -1){ - tagName = tagName.substr(colonIndex+1); - } - } - - if(this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); - } - - if(currentNode){ - textData = this.saveTextToParentTag(textData, currentNode, jPath); +async function fastFailMetadataRequest(options) { + const secondaryOptions = { + ...options, + url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. 
+ // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; } - - //check if last tag of nested tag was unpaired tag - const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); - if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ - throw new Error(`Unpaired tag can not be used as closing tag: `); + else { + responded = true; + throw err; } - let propIndex = 0 - if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ - propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) - this.tagsNodeStack.pop(); - }else{ - propIndex = jPath.lastIndexOf("."); + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; } - jPath = jPath.substring(0, propIndex); - - currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope - textData = ""; - i = closeIndex; - } else if( xmlData[i+1] === '?') { - - let tagData = readTagExp(xmlData,i, false, "?>"); - if(!tagData) throw new Error("Pi Tag is not closed."); - - textData = this.saveTextToParentTag(textData, currentNode, jPath); - if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ - - }else{ - - const childNode = new xmlNode(tagData.tagName); - childNode.add(this.options.textNodeName, ""); - - if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); - } - this.addChild(currentNode, childNode, jPath) - + else { + responded = true; + throw err; } - - - i = tagData.closeIndex + 1; - } else if(xmlData.substr(i + 1, 3) === '!--') { - const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") - if(this.options.commentPropName){ - const comment = xmlData.substring(i + 4, endIndex - 2); - - textData = this.saveTextToParentTag(textData, currentNode, jPath); - - currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +exports.instance = instance; +/** + * Obtain metadata for the current GCP project. 
+ * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +exports.project = project; +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe_domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +exports.universe = universe; +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +exports.bulk = bulk; +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); } - i = endIndex; - } else if( xmlData.substr(i + 1, 2) === '!D') { - const result = readDocType(xmlData, i); - this.docTypeEntities = result.entities; - i = result.i; - }else if(xmlData.substr(i + 1, 2) === '![') { - const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; - const tagExp = xmlData.substring(i + 9,closeIndex); - - textData = this.saveTextToParentTag(textData, currentNode, jPath); - - let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); - if(val == undefined) val = ""; - - //cdata should be set even if it is 0 length string - if(this.options.cdataPropName){ - currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); - }else{ - currentNode.add(this.options.textNodeName, val); + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server } - - i = closeIndex + 2; - }else {//Opening tag - let result = readTagExp(xmlData,i, this.options.removeNSPrefix); - let tagName= result.tagName; - const rawTagName = result.rawTagName; - let tagExp = result.tagExp; - let attrExpPresent = result.attrExpPresent; - let closeIndex = result.closeIndex; - - if (this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); } - - //save text as child node - if (currentNode && textData) { - if(currentNode.tagname !== '!xml'){ - //when nested tag is found - textData = this.saveTextToParentTag(textData, currentNode, jPath, false); - } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); } - - //check if last tag was unpaired tag - const lastTag = currentNode; - if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ - currentNode = this.tagsNodeStack.pop(); - jPath = jPath.substring(0, jPath.lastIndexOf(".")); + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; } - if(tagName !== xmlObj.tagname){ - jPath += jPath ? "." 
+ tagName : tagName; + if (err.response && err.response.status === 404) { + return false; } - if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { - let tagContent = ""; - //self-closing tag - if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ - if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' - tagName = tagName.substr(0, tagName.length - 1); - jPath = jPath.substr(0, jPath.length - 1); - tagExp = tagName; - }else{ - tagExp = tagExp.substr(0, tagExp.length - 1); - } - i = result.closeIndex; - } - //unpaired tag - else if(this.options.unpairedTags.indexOf(tagName) !== -1){ - - i = result.closeIndex; - } - //normal tag - else{ - //read until closing tag is found - const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); - if(!result) throw new Error(`Unexpected end of ${rawTagName}`); - i = result.i; - tagContent = result.tagContent; - } - - const childNode = new xmlNode(tagName); - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); - } - if(tagContent) { - tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); - } - - jPath = jPath.substr(0, jPath.lastIndexOf(".")); - childNode.add(this.options.textNodeName, tagContent); - - this.addChild(currentNode, childNode, jPath) - }else{ - //selfClosing tag - if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ - if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' - tagName = tagName.substr(0, tagName.length - 1); - jPath = jPath.substr(0, jPath.length - 1); - tagExp = tagName; - }else{ - tagExp = tagExp.substr(0, tagExp.length - 1); - } - - if(this.options.transformTagName) { - tagName = this.options.transformTagName(tagName); - } - - const childNode = new xmlNode(tagName); - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); - } - this.addChild(currentNode, childNode, jPath) - jPath = jPath.substr(0, jPath.lastIndexOf(".")); - } - //opening tag - else{ - const childNode = new xmlNode( tagName); - this.tagsNodeStack.push(currentNode); - - if(tagName !== tagExp && attrExpPresent){ - childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); } - this.addChild(currentNode, childNode, jPath) - currentNode = childNode; - } - textData = ""; - i = closeIndex; + // Failure to resolve the metadata service means that it is not available. + return false; } - } - }else{ - textData += xmlData[i]; } - } - return xmlObj.child; } - -function addChild(currentNode, childNode, jPath){ - const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) - if(result === false){ - }else if(typeof result === "string"){ - childNode.tagname = result - currentNode.addChild(childNode); - }else{ - currentNode.addChild(childNode); - } +exports.isAvailable = isAvailable; +/** + * reset the memoized isAvailable() lookup. 
+ */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; } - -const replaceEntitiesValue = function(val){ - - if(this.options.processEntities){ - for(let entityName in this.docTypeEntities){ - const entity = this.docTypeEntities[entityName]; - val = val.replace( entity.regx, entity.val); - } - for(let entityName in this.lastEntities){ - const entity = this.lastEntities[entityName]; - val = val.replace( entity.regex, entity.val); - } - if(this.options.htmlEntities){ - for(let entityName in this.htmlEntities){ - const entity = this.htmlEntities[entityName]; - val = val.replace( entity.regex, entity.val); - } +exports.resetIsAvailableCache = resetIsAvailableCache; +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); } - val = val.replace( this.ampEntity.regex, this.ampEntity.val); - } - return val; -} -function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { - if (textData) { //store previously collected data as textNode - if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 - - textData = this.parseTextData(textData, - currentNode.tagname, - jPath, - false, - currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, - isLeafNode); - - if (textData !== undefined && textData !== "") - currentNode.add(this.options.textNodeName, textData); - textData = ""; - } - return textData; + return exports.gcpResidencyCache; } - -//TODO: use jPath to simplify the logic +exports.getGCPResidency = getGCPResidency; /** - * - * @param {string[]} stopNodes - * @param {string} jPath - * @param {string} currentTagName + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting */ -function isItStopNode(stopNodes, jPath, currentTagName){ - const allNodesExp = "*." + currentTagName; - for (const stopNodePath in stopNodes) { - const stopNodeExp = stopNodes[stopNodePath]; - if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; - } - return false; +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); } - +exports.setGCPResidency = setGCPResidency; /** - * Returns the tag Expression and where it is ending handling single-double quotes situation - * @param {string} xmlData - * @param {number} i starting index - * @returns + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. 
*/ -function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ - let attrBoundary; - let tagExp = ""; - for (let index = i; index < xmlData.length; index++) { - let ch = xmlData[index]; - if (attrBoundary) { - if (ch === attrBoundary) attrBoundary = "";//reset - } else if (ch === '"' || ch === "'") { - attrBoundary = ch; - } else if (ch === closingChar[0]) { - if(closingChar[1]){ - if(xmlData[index + 1] === closingChar[1]){ - return { - data: tagExp, - index: index - } - } - }else{ - return { - data: tagExp, - index: index - } - } - } else if (ch === '\t') { - ch = " " - } - tagExp += ch; - } +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; } +exports.requestTimeout = requestTimeout; +__exportStar(__nccwpck_require__(51904), exports); +//# sourceMappingURL=index.js.map -function findClosingIndex(xmlData, str, i, errMsg){ - const closingIndex = xmlData.indexOf(str, i); - if(closingIndex === -1){ - throw new Error(errMsg) - }else{ - return closingIndex + str.length - 1; - } -} +/***/ }), -function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ - const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); - if(!result) return; - let tagExp = result.data; - const closeIndex = result.index; - const separatorIndex = tagExp.search(/\s/); - let tagName = tagExp; - let attrExpPresent = true; - if(separatorIndex !== -1){//separate tag name and attributes expression - tagName = tagExp.substring(0, separatorIndex); - tagExp = tagExp.substring(separatorIndex + 1).trimStart(); - } +/***/ 44627: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const rawTagName = tagName; - if(removeNSPrefix){ - const colonIndex = tagName.indexOf(":"); - if(colonIndex !== -1){ - tagName = tagName.substr(colonIndex+1); - attrExpPresent = tagName !== result.data.substr(colonIndex + 1); - } - } +"use strict"; - return { - tagName: tagName, - tagExp: tagExp, - closeIndex: closeIndex, - attrExpPresent: attrExpPresent, - rawTagName: rawTagName, - } -} +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
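// --- Illustrative sketch (not part of the diff hunk above) -----------------
// The hunk above bundles the metadata helpers this module exports
// (isAvailable, instance, project, bulk). As a minimal usage sketch that
// mirrors the JSDoc examples in that hunk -- assuming those exports are in
// scope (outside the bundle they are normally consumed via the gcp-metadata
// package) -- detection plus a couple of lookups might look like this:
async function describeGcpRuntime() {
  // isAvailable() memoizes its probe (cachedIsAvailableResponse above),
  // so repeated calls do not re-ping the metadata server.
  if (!(await isAvailable())) {
    return null; // not on GCP, or the metadata server is unreachable
  }
  // These metadata paths are taken from the JSDoc examples in the hunk above.
  const projectId = await project('project-id');
  const email = await instance('service-accounts/default/email');
  return {projectId, email};
}
// ----------------------------------------------------------------------------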
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(82361); +const gaxios_1 = __nccwpck_require__(59555); +const transporters_1 = __nccwpck_require__(72649); +const util_1 = __nccwpck_require__(68905); /** - * find paired tag for a stop node - * @param {string} xmlData - * @param {string} tagName - * @param {number} i + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} */ -function readStopNodeData(xmlData, tagName, i){ - const startIndex = i; - // Starting at 1 since we already have an open tag - let openTagCount = 1; - - for (; i < xmlData.length; i++) { - if( xmlData[i] === "<"){ - if (xmlData[i+1] === "/") {//close tag - const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); - let closeTagName = xmlData.substring(i+2,closeIndex).trim(); - if(closeTagName === tagName){ - openTagCount--; - if (openTagCount === 0) { - return { - tagContent: xmlData.substring(startIndex, i), - i : closeIndex - } - } - } - i=closeIndex; - } else if(xmlData[i+1] === '?') { - const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") - i=closeIndex; - } else if(xmlData.substr(i + 1, 3) === '!--') { - const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") - i=closeIndex; - } else if(xmlData.substr(i + 1, 2) === '![') { - const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; - i=closeIndex; - } else { - const tagData = readTagExp(xmlData, i, '>') - - if (tagData) { - const openTagName = tagData && tagData.tagName; - if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { - openTagCount++; - } - i=tagData.closeIndex; - } +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? 
_d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; } - } - }//end for loop -} - -function parseValue(val, shouldParse, options) { - if (shouldParse && typeof val === 'string') { - //console.log(options) - const newval = val.trim(); - if(newval === 'true' ) return true; - else if(newval === 'false' ) return false; - else return toNumber(val, options); - } else { - if (util.isExist(val)) { - return val; - } else { - return ''; + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. + */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; } - } } - - -module.exports = OrderedObjParser; +exports.AuthClient = AuthClient; /***/ }), -/***/ 2380: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71569: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -const { buildOptions} = __nccwpck_require__(2821); -const OrderedObjParser = __nccwpck_require__(5832); -const { prettify} = __nccwpck_require__(2882); -const validator = __nccwpck_require__(1739); +"use strict"; -class XMLParser{ - - constructor(options){ - this.externalEntities = {}; - this.options = buildOptions(options); - - } +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(1754); +const baseexternalclient_1 = __nccwpck_require__(40810); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(89799); +const util_1 = __nccwpck_require__(68905); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Parse XML dats to JS object - * @param {string|Buffer} xmlData - * @param {boolean|Object} validationOption + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. */ - parse(xmlData,validationOption){ - if(typeof xmlData === "string"){ - }else if( xmlData.toString){ - xmlData = xmlData.toString(); - }else{ - throw new Error("XML data is accepted in String or Bytes[] form.") + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. 
+ if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); } - if( validationOption){ - if(validationOption === true) validationOption = {}; //validate with default options - - const result = validator.validate(xmlData, validationOption); - if (result !== true) { - throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) - } - } - const orderedObjParser = new OrderedObjParser(this.options); - orderedObjParser.addExternalEntities(this.externalEntities); - const orderedResult = orderedObjParser.parseXml(xmlData); - if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; - else return prettify(orderedResult, this.options); } - /** - * Add Entity which is not by default supported by this library - * @param {string} key - * @param {string} value + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. */ - addEntity(key, value){ - if(value.indexOf("&") !== -1){ - throw new Error("Entity value can't have '&'") - }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ - throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") - }else if(value === "&"){ - throw new Error("An entity with value '&' is not permitted"); - }else{ - this.externalEntities[key] = value; + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. + const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); } } +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; -module.exports = XMLParser; /***/ }), -/***/ 2882: -/***/ ((__unused_webpack_module, exports) => { +/***/ 1754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(78043); +/** AWS Signature Version 4 signing algorithm identifier. 
*/ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; /** - * - * @param {array} node - * @param {any} options - * @returns + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html */ -function prettify(node, options){ - return compress( node, options); -} - +const AWS_REQUEST_TYPE = 'aws4_request'; /** - * - * @param {array} arr - * @param {object} options - * @param {string} jPath - * @returns object + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html */ -function compress(arr, options, jPath){ - let text; - const compressedObj = {}; - for (let i = 0; i < arr.length; i++) { - const tagObj = arr[i]; - const property = propName(tagObj); - let newJpath = ""; - if(jPath === undefined) newJpath = property; - else newJpath = jPath + "." + property; - - if(property === options.textNodeName){ - if(text === undefined) text = tagObj[property]; - else text += "" + tagObj[property]; - }else if(property === undefined){ - continue; - }else if(tagObj[property]){ - - let val = compress(tagObj[property], options, newJpath); - const isLeaf = isLeafTag(val, options); - - if(tagObj[":@"]){ - assignAttributes( val, tagObj[":@"], newJpath, options); - }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ - val = val[options.textNodeName]; - }else if(Object.keys(val).length === 0){ - if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; - else val = ""; - } - - if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { - if(!Array.isArray(compressedObj[property])) { - compressedObj[property] = [ compressedObj[property] ]; +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. + */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); } - compressedObj[property].push(val); - }else{ - //TODO: if a node is not an array, then check if it should be an array - //also determine if it is a leaf node - if (options.isArray(property, newJpath, isLeaf )) { - compressedObj[property] = [val]; - }else{ - compressedObj[property] = val; + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? 
JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); } - } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; } - - } - // if(text && text.length > 0) compressedObj[options.textNodeName] = text; - if(typeof text === "string"){ - if(text.length > 0) compressedObj[options.textNodeName] = text; - }else if(text !== undefined) compressedObj[options.textNodeName] = text; - return compressedObj; } - -function propName(obj){ - const keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - const key = keys[i]; - if(key !== ":@") return key; - } +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); } - -function assignAttributes(obj, attrMap, jpath, options){ - if (attrMap) { - const keys = Object.keys(attrMap); - const len = keys.length; //don't make it inline - for (let i = 0; i < len; i++) { - const atrrName = keys[i]; - if (options.isArray(atrrName, jpath + "." + atrrName, true, true)) { - obj[atrrName] = [ attrMap[atrrName] ]; - } else { - obj[atrrName] = attrMap[atrrName]; - } - } - } +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. 
+ */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; } - -function isLeafTag(obj, options){ - const { textNodeName } = options; - const propCount = Object.keys(obj).length; - - if (propCount === 0) { - return true; - } - - if ( - propCount === 1 && - (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) - ) { - return true; - } - - return false; +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. + * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; } -exports.prettify = prettify; /***/ }), -/***/ 7462: -/***/ ((module) => { +/***/ 40810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -class XmlNode{ - constructor(tagname) { - this.tagname = tagname; - this.child = []; //nested tags, text, cdata, comments in order - this[":@"] = {}; //attributes map - } - add(key,val){ - // this.child.push( {name : key, val: val, isCdata: isCdata }); - if(key === "__proto__") key = "#__proto__"; - this.child.push( {[key]: val }); - } - addChild(node) { - if(node.tagname === "__proto__") node.tagname = "#__proto__"; - if(node[":@"] && Object.keys(node[":@"]).length > 0){ - this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); - }else{ - this.child.push( { [node.tagname]: node.child }); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
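// --- Illustrative sketch (not part of the diff hunk above) -----------------
// The AwsRequestSigner hunk above derives a SigV4 signing key and builds the
// Authorization header that AwsClient.retrieveSubjectToken() sends to AWS STS.
// A standalone sketch of that call, assuming the AwsRequestSigner class from
// the hunk above is in scope and using placeholder credentials (the key values
// below are illustrative, not real):
async function signGetCallerIdentity(region) {
  const signer = new AwsRequestSigner(
    // Credential supplier: AwsClient backs this with its
    // AwsSecurityCredentialsSupplier; here it is a hard-coded placeholder.
    async () => ({
      accessKeyId: 'AKIAEXAMPLEEXAMPLE',     // placeholder
      secretAccessKey: 'example/secret/key', // placeholder
    }),
    region
  );
  // Loosely mirrors AwsClient.retrieveSubjectToken(): sign a POST to the
  // regional STS GetCallerIdentity endpoint; the result carries the
  // x-amz-date and Authorization headers computed by
  // generateAuthenticationHeaderMap().
  return signer.getRequestOptions({
    url: `https://sts.${region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15`,
    method: 'POST',
  });
}
// ----------------------------------------------------------------------------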
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(12781); +const authclient_1 = __nccwpck_require__(44627); +const sts = __nccwpck_require__(86308); +const util_1 = __nccwpck_require__(68905); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. + * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(51402); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(44627); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; } - }; -}; - - -module.exports = XmlNode; - -/***/ }), - -/***/ 1133: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var debug; - -module.exports = function () { - if (!debug) { - try { - /* eslint global-require: off */ - debug = __nccwpck_require__(8237)("follow-redirects"); + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. 
The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; } - catch (error) { /* */ } - if (typeof debug !== "function") { - debug = function () { /* */ }; + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; } - } - debug.apply(null, arguments); -}; - - -/***/ }), - -/***/ 7707: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var url = __nccwpck_require__(7310); -var URL = url.URL; -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var Writable = (__nccwpck_require__(2781).Writable); -var assert = __nccwpck_require__(9491); -var debug = __nccwpck_require__(1133); - -// Whether to use the native URL object or the legacy url module -var useNativeURL = false; -try { - assert(new URL()); -} -catch (error) { - useNativeURL = error.code === "ERR_INVALID_URL"; -} - -// URL fields to preserve in copy operations -var preservedUrlFields = [ - "auth", - "host", - "hostname", - "href", - "path", - "pathname", - "port", - "protocol", - "query", - "search", - "hash", -]; - -// Create handlers that pass events from native requests -var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; -var eventHandlers = Object.create(null); -events.forEach(function (event) { - eventHandlers[event] = function (arg1, arg2, arg3) { - this._redirectable.emit(event, arg1, arg2, arg3); - }; -}); - -// Error types with codes -var InvalidUrlError = createErrorType( - "ERR_INVALID_URL", - "Invalid URL", - TypeError -); -var RedirectionError = createErrorType( - "ERR_FR_REDIRECTION_FAILURE", - "Redirected request failed" -); -var TooManyRedirectsError = createErrorType( - "ERR_FR_TOO_MANY_REDIRECTS", - "Maximum number of redirects exceeded", - RedirectionError -); -var MaxBodyLengthExceededError = createErrorType( - "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", - "Request body larger than maxBodyLength limit" -); -var WriteAfterEndError = createErrorType( - "ERR_STREAM_WRITE_AFTER_END", - "write after end" -); - -// istanbul ignore next -var destroy = Writable.prototype.destroy || noop; - -// An HTTP(S) request that can be redirected -function RedirectableRequest(options, responseCallback) { - // Initialize the request - Writable.call(this); - this._sanitizeOptions(options); - this._options = options; - this._ended = false; - this._ending = false; - this._redirectCount = 0; - this._redirects = []; - this._requestBodyLength = 0; - this._requestBodyBuffers = []; - - // Attach a callback if passed - if (responseCallback) { - this.on("response", responseCallback); - } - - // React to responses of native requests - var self = this; - this._onNativeResponse = function (response) { - try { - self._processResponse(response); + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. 
+ */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; } - catch (cause) { - self.emit("error", cause instanceof RedirectionError ? - cause : new RedirectionError({ cause: cause })); + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); } - }; - - // Perform the first request - this._performRequest(); -} -RedirectableRequest.prototype = Object.create(Writable.prototype); - -RedirectableRequest.prototype.abort = function () { - destroyRequest(this._currentRequest); - this._currentRequest.abort(); - this.emit("abort"); -}; - -RedirectableRequest.prototype.destroy = function (error) { - destroyRequest(this._currentRequest, error); - destroy.call(this, error); - return this; -}; - -// Writes buffered data to the current native request -RedirectableRequest.prototype.write = function (data, encoding, callback) { - // Writing is not allowed if end has been called - if (this._ending) { - throw new WriteAfterEndError(); - } - - // Validate input and shift parameters if necessary - if (!isString(data) && !isBuffer(data)) { - throw new TypeError("data should be a string, Buffer or Uint8Array"); - } - if (isFunction(encoding)) { - callback = encoding; - encoding = null; - } - - // Ignore empty buffers, since writing them doesn't invoke the callback - // https://github.com/nodejs/node/issues/22066 - if (data.length === 0) { - if (callback) { - callback(); + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } } - return; - } - // Only write when we don't exceed the maximum body length - if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { - this._requestBodyLength += data.length; - this._requestBodyBuffers.push({ data: data, encoding: encoding }); - this._currentRequest.write(data, encoding, callback); - } - // Error when we exceed the maximum body length - else { - this.emit("error", new MaxBodyLengthExceededError()); - this.abort(); - } -}; - -// Ends the current native request -RedirectableRequest.prototype.end = function (data, encoding, callback) { - // Shift parameters if necessary - if (isFunction(data)) { - callback = data; - data = encoding = null; - } - else if (isFunction(encoding)) { - callback = encoding; - encoding = null; - } - - // Write data if needed and end - if (!data) { - this._ended = this._ending = true; - this._currentRequest.end(null, null, callback); - } - else { - var self = this; - var currentRequest = this._currentRequest; - this.write(data, encoding, function () { - self._ended = true; - currentRequest.end(null, null, callback); - }); - this._ending = true; - } -}; - -// Sets a header value on the current native request -RedirectableRequest.prototype.setHeader = 
function (name, value) { - this._options.headers[name] = value; - this._currentRequest.setHeader(name, value); -}; - -// Clears a header value on the current native request -RedirectableRequest.prototype.removeHeader = function (name) { - delete this._options.headers[name]; - this._currentRequest.removeHeader(name); -}; - -// Global timeout for all underlying requests -RedirectableRequest.prototype.setTimeout = function (msecs, callback) { - var self = this; - - // Destroys the socket on timeout - function destroyOnTimeout(socket) { - socket.setTimeout(msecs); - socket.removeListener("timeout", socket.destroy); - socket.addListener("timeout", socket.destroy); - } - - // Sets up a timer to trigger a timeout event - function startTimer(socket) { - if (self._timeout) { - clearTimeout(self._timeout); + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; } - self._timeout = setTimeout(function () { - self.emit("timeout"); - clearTimer(); - }, msecs); - destroyOnTimeout(socket); - } - - // Stops a timeout from triggering - function clearTimer() { - // Clear the timeout - if (self._timeout) { - clearTimeout(self._timeout); - self._timeout = null; + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedAccessToken; } - - // Clean up all attached listeners - self.removeListener("abort", clearTimer); - self.removeListener("error", clearTimer); - self.removeListener("response", clearTimer); - self.removeListener("close", clearTimer); - if (callback) { - self.removeListener("timeout", callback); + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; } - if (!self.socket) { - self._currentRequest.removeListener("socket", startTimer); + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; } - } - - // Attach callback if passed - if (callback) { - this.on("timeout", callback); - } - - // Start the timer if or when the socket is opened - if (this.socket) { - startTimer(this.socket); - } - else { - this._currentRequest.once("socket", startTimer); - } - - // Clean up on events - this.on("socket", destroyOnTimeout); - this.on("abort", clearTimer); - this.on("error", clearTimer); - this.on("response", clearTimer); - this.on("close", clearTimer); - - return this; -}; - -// Proxy all other public ClientRequest methods -[ - "flushHeaders", "getHeader", - "setNoDelay", "setSocketKeepAlive", -].forEach(function (method) { - RedirectableRequest.prototype[method] = function (a, b) { - return this._currentRequest[method](a, b); - }; -}); - -// Proxy all public ClientRequest properties -["aborted", "connection", "socket"].forEach(function (property) { - Object.defineProperty(RedirectableRequest.prototype, property, { - get: function () { return this._currentRequest[property]; }, - }); -}); - -RedirectableRequest.prototype._sanitizeOptions = function (options) { - // Ensure headers are always present - if (!options.headers) { - options.headers = {}; - } - - // Since http.request treats host as an alias of hostname, - // but the url module interprets host as hostname plus port, - // eliminate the host property to avoid confusion. 
- if (options.host) { - // Use hostname if set, because it has precedence - if (!options.hostname) { - options.hostname = options.host; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; } - delete options.host; - } - - // Complete the URL object when necessary - if (!options.pathname && options.path) { - var searchPos = options.path.indexOf("?"); - if (searchPos < 0) { - options.pathname = options.path; + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; } - else { - options.pathname = options.path.substring(0, searchPos); - options.search = options.path.substring(searchPos); + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; } - } -}; - +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; -// Executes the next native request (initial or redirect) -RedirectableRequest.prototype._performRequest = function () { - // Load the native protocol - var protocol = this._options.protocol; - var nativeProtocol = this._options.nativeProtocols[protocol]; - if (!nativeProtocol) { - throw new TypeError("Unsupported protocol " + protocol); - } - // If specified, use the agent corresponding to the protocol - // (HTTP and HTTPS use different types of agents) - if (this._options.agents) { - var scheme = protocol.slice(0, -1); - this._options.agent = this._options.agents[scheme]; - } +/***/ }), - // Create the native request and set up its event handlers - var request = this._currentRequest = - nativeProtocol.request(this._options, this._onNativeResponse); - request._redirectable = this; - for (var event of events) { - request.on(event, eventHandlers[event]); - } +/***/ 96875: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // RFC7230§5.3.1: When making a request directly to an origin server, […] - // a client MUST send only the absolute path […] as the request-target. - this._currentUrl = /^\//.test(this._options.path) ? - url.format(this._options) : - // When making a request to a proxy, […] - // a client MUST send the target URI in absolute-form […]. 
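Taken together, refreshAccessTokenAsync in the BaseExternalAccountClient above performs a two-step flow: exchange the external subject token at the Google STS endpoint, then optionally trade the resulting access token for a service-account impersonated token. A rough sketch of those two HTTP calls, assuming Node 18+ global fetch; the URL, form field names and JSON body follow the public STS and IAM Credentials APIs rather than the bundled sts module, so treat them as illustrative assumptions:

interface StsTokenResponse {
  access_token: string;
  expires_in?: number;
}

// Step 1: RFC 8693 token exchange at the STS endpoint
// (DEFAULT_TOKEN_URL resolves to https://sts.googleapis.com/v1/token for the default universe).
async function exchangeExternalToken(
  subjectToken: string,
  audience: string,
  subjectTokenType: string
): Promise<StsTokenResponse> {
  const body = new URLSearchParams({
    grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange',
    requested_token_type: 'urn:ietf:params:oauth:token-type:access_token',
    audience,
    subject_token: subjectToken,
    subject_token_type: subjectTokenType,
    scope: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const res = await fetch('https://sts.googleapis.com/v1/token', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body,
  });
  if (!res.ok) throw new Error(`STS exchange failed: ${res.status}`);
  return (await res.json()) as StsTokenResponse;
}

// Step 2 (optional): trade the STS token for a service-account impersonated
// token via the configured service_account_impersonation_url, mirroring the
// {scope, lifetime} body and {accessToken, expireTime} response used above.
async function impersonate(impersonationUrl: string, stsAccessToken: string, scopes: string[]) {
  const res = await fetch(impersonationUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${stsAccessToken}`,
    },
    body: JSON.stringify({ scope: scopes, lifetime: '3600s' }),
  });
  if (!res.ok) throw new Error(`Impersonation failed: ${res.status}`);
  return (await res.json()) as { accessToken: string; expireTime: string };
}

The client caches whichever token results and, as isExpired shows, refreshes it eagerly once the expiry time minus the refresh threshold has passed.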
- this._options.path; +"use strict"; - // End a redirected request - // (The first request must be ended explicitly with RedirectableRequest#end) - if (this._isRedirect) { - // Write the request entity and end - var i = 0; - var self = this; - var buffers = this._requestBodyBuffers; - (function writeNext(error) { - // Only write if this request has not been redirected yet - /* istanbul ignore else */ - if (request === self._currentRequest) { - // Report any write errors - /* istanbul ignore if */ - if (error) { - self.emit("error", error); +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const gcpMetadata = __nccwpck_require__(3563); +const oauth2client_1 = __nccwpck_require__(3936); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. 
+ * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); } - // Write the next buffer if there are still left - else if (i < buffers.length) { - var buffer = buffers[i++]; - /* istanbul ignore else */ - if (!request.finished) { - request.write(buffer.data, buffer.encoding, writeNext); - } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; } - // End the request if `end` has been called on us - else if (self._ended) { - request.end(); + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; } - } - }()); - } -}; - -// Processes a response from the current native request -RedirectableRequest.prototype._processResponse = function (response) { - // Store the redirected response - var statusCode = response.statusCode; - if (this._options.trackRedirects) { - this._redirects.push({ - url: this._currentUrl, - headers: response.headers, - statusCode: statusCode, - }); - } - - // RFC7231§6.4: The 3xx (Redirection) class of status code indicates - // that further action needs to be taken by the user agent in order to - // fulfill the request. If a Location header field is provided, - // the user agent MAY automatically redirect its request to the URI - // referenced by the Location field value, - // even if the specific status code is not understood. - - // If the response is not a redirect; return it as-is - var location = response.headers.location; - if (!location || this._options.followRedirects === false || - statusCode < 300 || statusCode >= 400) { - response.responseUrl = this._currentUrl; - response.redirects = this._redirects; - this.emit("response", response); - - // Clean up - this._requestBodyBuffers = []; - return; - } - - // The response is a redirect, so abort the current request - destroyRequest(this._currentRequest); - // Discard the remainder of the response to avoid waiting for data - response.destroy(); - - // RFC7231§6.4: A client SHOULD detect and intervene - // in cyclical redirections (i.e., "infinite" redirection loops). - if (++this._redirectCount > this._options.maxRedirects) { - throw new TooManyRedirectsError(); - } - - // Store the request headers if applicable - var requestHeaders; - var beforeRedirect = this._options.beforeRedirect; - if (beforeRedirect) { - requestHeaders = Object.assign({ - // The Host header was set by nativeProtocol.request - Host: response.req.getHeader("host"), - }, this._options.headers); - } - - // RFC7231§6.4: Automatic redirection needs to done with - // care for methods not known to be safe, […] - // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change - // the request method from POST to GET for the subsequent request. 
- var method = this._options.method; - if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || - // RFC7231§6.4.4: The 303 (See Other) status code indicates that - // the server is redirecting the user agent to a different resource […] - // A user agent can perform a retrieval request targeting that URI - // (a GET or HEAD request if using HTTP) […] - (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { - this._options.method = "GET"; - // Drop a possible entity and headers related to it - this._requestBodyBuffers = []; - removeMatchingHeaders(/^content-/i, this._options.headers); - } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; - // Drop the Host header, as the redirect might lead to a different host - var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); - // If the redirect is relative, carry over the host of the last request - var currentUrlParts = parseUrl(this._currentUrl); - var currentHost = currentHostHeader || currentUrlParts.host; - var currentUrl = /^\w+:/.test(location) ? this._currentUrl : - url.format(Object.assign(currentUrlParts, { host: currentHost })); +/***/ }), - // Create the redirected request - var redirectUrl = resolveUrl(location, currentUrl); - debug("redirecting to", redirectUrl.href); - this._isRedirect = true; - spreadUrlObject(redirectUrl, this._options); +/***/ 89799: +/***/ (function(__unused_webpack_module, exports) { - // Drop confidential headers when redirecting to a less secure protocol - // or to a different domain that is not a superdomain - if (redirectUrl.protocol !== currentUrlParts.protocol && - redirectUrl.protocol !== "https:" || - redirectUrl.host !== currentHost && - !isSubdomain(redirectUrl.host, currentHost)) { - removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); - } +"use strict"; - // Evaluate the beforeRedirect callback - if (isFunction(beforeRedirect)) { - var responseDetails = { - headers: response.headers, - statusCode: statusCode, +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
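The Compute client above never performs a real OAuth refresh; refreshTokenNoCache simply asks the GCE metadata server for a token through the gcp-metadata package. A sketch of the equivalent raw request, assuming the documented metadata-server endpoint and Metadata-Flavor header and Node 18+ fetch; the function name fetchMetadataToken is illustrative only:

// Fetch an access token for an instance service account from the GCE
// metadata server. Only works on GCE/GKE/Cloud Run style environments.
async function fetchMetadataToken(serviceAccount = 'default', scopes: string[] = []) {
  const base = 'http://metadata.google.internal/computeMetadata/v1/instance';
  const query = scopes.length ? `?scopes=${encodeURIComponent(scopes.join(','))}` : '';
  const res = await fetch(`${base}/service-accounts/${serviceAccount}/token${query}`, {
    headers: { 'Metadata-Flavor': 'Google' },
  });
  if (!res.ok) throw new Error(`Metadata server returned ${res.status}`);
  // Shape mirrors what refreshTokenNoCache consumes: access_token plus expires_in.
  return (await res.json()) as { access_token: string; expires_in: number; token_type: string };
}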
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. + */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. 
+ */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. + const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. 
+ const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, }; - var requestDetails = { - url: currentUrl, - method: method, - headers: requestHeaders, + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, }; - beforeRedirect(this._options, responseDetails, requestDetails); - this._sanitizeOptions(this._options); - } - - // Perform the redirected request - this._performRequest(); + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. 
+ return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; }; -// Wraps the key/value object of protocols with redirect functionality -function wrap(protocols) { - // Default settings - var exports = { - maxRedirects: 21, - maxBodyLength: 10 * 1024 * 1024, - }; - // Wrap each protocol - var nativeProtocols = {}; - Object.keys(protocols).forEach(function (scheme) { - var protocol = scheme + ":"; - var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; - var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); +/***/ }), - // Executes a request, following redirects - function request(input, options, callback) { - // Parse parameters, ensuring that input is an object - if (isURL(input)) { - input = spreadUrlObject(input); - } - else if (isString(input)) { - input = spreadUrlObject(parseUrl(input)); - } - else { - callback = options; - options = validateUrl(input); - input = { protocol: protocol }; - } - if (isFunction(options)) { - callback = options; - options = null; - } +/***/ 6270: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Set defaults - options = Object.assign({ - maxRedirects: exports.maxRedirects, - maxBodyLength: exports.maxBodyLength, - }, input, options); - options.nativeProtocols = nativeProtocols; - if (!isString(options.host) && !isString(options.hostname)) { - options.hostname = "::1"; - } +"use strict"; - assert.equal(options.protocol, protocol, "protocol mismatch"); - debug("options", options); - return new RedirectableRequest(options, callback); +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(12781); +const authclient_1 = __nccwpck_require__(44627); +const sts = __nccwpck_require__(86308); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. 
+ */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. + * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. 
+ for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; } - - // Executes a GET request, following redirects - function get(input, options, callback) { - var wrappedRequest = wrappedProtocol.request(input, options, callback); - wrappedRequest.end(); - return wrappedRequest; + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; } - - // Expose the properties on the wrapped protocol - Object.defineProperties(wrappedProtocol, { - request: { value: request, configurable: true, enumerable: true, writable: true }, - get: { value: get, configurable: true, enumerable: true, writable: true }, - }); - }); - return exports; -} - -function noop() { /* empty */ } - -function parseUrl(input) { - var parsed; - /* istanbul ignore else */ - if (useNativeURL) { - parsed = new URL(input); - } - else { - // Ensure the URL is valid and absolute - parsed = validateUrl(url.parse(input)); - if (!isString(parsed.protocol)) { - throw new InvalidUrlError({ input }); + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure + * @return A promise that resolves with the successful response. 
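For context on how DownscopedClient is driven, a usage sketch follows. The constructor arguments match the class above, but the rule fields (availableResource, availablePermissions, availabilityCondition) and the GoogleAuth source client come from the public google-auth-library documentation rather than from this excerpt, so treat those names as assumptions:

import { GoogleAuth, DownscopedClient } from 'google-auth-library';

// Restrict a broader source credential to read-only access on one bucket prefix.
const credentialAccessBoundary = {
  accessBoundary: {
    accessBoundaryRules: [
      {
        availableResource: '//storage.googleapis.com/projects/_/buckets/my-bucket',
        availablePermissions: ['inRole:roles/storage.objectViewer'],
        availabilityCondition: {
          expression:
            "resource.name.startsWith('projects/_/buckets/my-bucket/objects/customer-a/')",
        },
      },
    ],
  },
};

async function main() {
  const auth = new GoogleAuth({ scopes: 'https://www.googleapis.com/auth/cloud-platform' });
  const sourceClient = await auth.getClient();
  const downscoped = new DownscopedClient(sourceClient, credentialAccessBoundary);
  const { token } = await downscoped.getAccessToken();
  console.log('downscoped token prefix:', token?.slice(0, 12));
}

main().catch(console.error);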
+ */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedDownscopedAccessToken; } - } - return parsed; -} - -function resolveUrl(relative, base) { - /* istanbul ignore next */ - return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); -} - -function validateUrl(input) { - if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { - throw new InvalidUrlError({ input: input.href || input }); - } - if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { - throw new InvalidUrlError({ input: input.href || input }); - } - return input; -} - -function spreadUrlObject(urlObject, target) { - var spread = target || {}; - for (var key of preservedUrlFields) { - spread[key] = urlObject[key]; - } - - // Fix IPv6 hostname - if (spread.hostname.startsWith("[")) { - spread.hostname = spread.hostname.slice(1, -1); - } - // Ensure port is a number - if (spread.port !== "") { - spread.port = Number(spread.port); - } - // Concatenate path - spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; - - return spread; -} - -function removeMatchingHeaders(regex, headers) { - var lastValue; - for (var header in headers) { - if (regex.test(header)) { - lastValue = headers[header]; - delete headers[header]; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; } - } - return (lastValue === null || typeof lastValue === "undefined") ? - undefined : String(lastValue).trim(); -} - -function createErrorType(code, message, baseClass) { - // Create constructor - function CustomError(properties) { - Error.captureStackTrace(this, this.constructor); - Object.assign(this, properties || {}); - this.code = code; - this.message = this.cause ? message + ": " + this.cause.message : message; - } - - // Attach constructor and set default properties - CustomError.prototype = new (baseClass || Error)(); - Object.defineProperties(CustomError.prototype, { - constructor: { - value: CustomError, - enumerable: false, - }, - name: { - value: "Error [" + code + "]", - enumerable: false, - }, - }); - return CustomError; -} - -function destroyRequest(request, error) { - for (var event of events) { - request.removeListener(event, eventHandlers[event]); - } - request.on("error", noop); - request.destroy(error); -} - -function isSubdomain(subdomain, domain) { - assert(isString(subdomain) && isString(domain)); - var dot = subdomain.length - domain.length - 1; - return dot > 0 && subdomain[dot] === "." 
&& subdomain.endsWith(domain); -} - -function isString(value) { - return typeof value === "string" || value instanceof String; } - -function isFunction(value) { - return typeof value === "function"; -} - -function isBuffer(value) { - return typeof value === "object" && ("length" in value); -} - -function isURL(value) { - return URL && value instanceof URL; -} - -// Exports -module.exports = wrap({ http: http, https: https }); -module.exports.wrap = wrap; +exports.DownscopedClient = DownscopedClient; /***/ }), -/***/ 6129: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 21380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2018 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.defaultErrorRedactor = exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; -const url_1 = __nccwpck_require__(7310); -const util_1 = __nccwpck_require__(1980); -const extend_1 = __importDefault(__nccwpck_require__(8171)); -/** - * Support `instanceof` operator for `GaxiosError`s in different versions of this library. - * - * @see {@link GaxiosError[Symbol.hasInstance]} - */ -exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); -/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ -class GaxiosError extends Error { - /** - * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. - * - * @see {@link GAXIOS_ERROR_SYMBOL} - * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} - */ - static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { - if (instance && - typeof instance === 'object' && - exports.GAXIOS_ERROR_SYMBOL in instance && - instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { - return true; - } - // fallback to native - return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); +exports.getEnv = exports.clear = exports.GCPEnv = void 0; +const gcpMetadata = __nccwpck_require__(3563); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +exports.clear = clear; +async function getEnv() { + if (envPromise) { + return envPromise; } - constructor(message, config, response, error) { - var _b; - super(message); - this.config = config; - this.response = response; - this.error = error; - /** - * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
- * - * @see {@link GAXIOS_ERROR_SYMBOL} - * @see {@link GaxiosError[Symbol.hasInstance]} - * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} - * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} - * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} - */ - this[_a] = util_1.pkg.version; - // deep-copy config as we do not want to mutate - // the existing config for future retries/use - this.config = (0, extend_1.default)(true, {}, config); - if (this.response) { - this.response.config = (0, extend_1.default)(true, {}, this.response.config); - } - if (this.response) { - try { - this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); - } - catch (_c) { - // best effort - don't throw an error within an error - // we could set `this.response.config.responseType = 'unknown'`, but - // that would mutate future calls with this config object. - } - this.status = this.response.status; + envPromise = getEnvMemoized(); + return envPromise; +} +exports.getEnv = getEnv; +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; } - if (error && 'code' in error && error.code) { - this.code = error.code; + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; } - if (config.errorRedactor) { - config.errorRedactor({ - config: this.config, - response: this.response, - }); + else { + env = GCPEnv.COMPUTE_ENGINE; } } -} -exports.GaxiosError = GaxiosError; -function translateData(responseType, data) { - switch (responseType) { - case 'stream': - return data; - case 'json': - return JSON.parse(JSON.stringify(data)); - case 'arraybuffer': - return JSON.parse(Buffer.from(data).toString('utf8')); - case 'blob': - return JSON.parse(data.text()); - default: - return data; + else { + env = GCPEnv.NONE; } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); } /** - * An experimental error redactor. - * - * @param config Config to potentially redact properties of - * @param response Config to potentially redact properties of - * - * @experimental + * This check only verifies that the environment is running knative. + * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. 
*/ -function defaultErrorRedactor(data) { - const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; - function redactHeaders(headers) { - if (!headers) - return; - for (const key of Object.keys(headers)) { - // any casing of `Authentication` - if (/^authentication$/i.test(key)) { - headers[key] = REDACT; - } - // any casing of `Authorization` - if (/^authorization$/i.test(key)) { - headers[key] = REDACT; - } - // anything containing secret, such as 'client secret' - if (/secret/i.test(key)) { - headers[key] = REDACT; - } - } - } - function redactString(obj, key) { - if (typeof obj === 'object' && - obj !== null && - typeof obj[key] === 'string') { - const text = obj[key]; - if (/grant_type=/i.test(text) || - /assertion=/i.test(text) || - /secret/i.test(text)) { - obj[key] = REDACT; - } - } - } - function redactObject(obj) { - if (typeof obj === 'object' && obj !== null) { - if ('grant_type' in obj) { - obj['grant_type'] = REDACT; - } - if ('assertion' in obj) { - obj['assertion'] = REDACT; - } - if ('client_secret' in obj) { - obj['client_secret'] = REDACT; - } - } - } - if (data.config) { - redactHeaders(data.config.headers); - redactString(data.config, 'data'); - redactObject(data.config.data); - redactString(data.config, 'body'); - redactObject(data.config.body); - try { - const url = new url_1.URL('', data.config.url); - if (url.searchParams.has('token')) { - url.searchParams.set('token', REDACT); - } - if (url.searchParams.has('client_secret')) { - url.searchParams.set('client_secret', REDACT); - } - data.config.url = url.toString(); - } - catch (_b) { - // ignore error - no need to parse an invalid URL - } +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; } - if (data.response) { - defaultErrorRedactor({ config: data.response.config }); - redactHeaders(data.response.headers); - redactString(data.response, 'data'); - redactObject(data.response.data); + catch (e) { + return false; } - return data; } -exports.defaultErrorRedactor = defaultErrorRedactor; -//# sourceMappingURL=common.js.map +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} + /***/ }), -/***/ 8133: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 8749: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2022 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Gaxios = void 0; -const extend_1 = __importDefault(__nccwpck_require__(8171)); -const https_1 = __nccwpck_require__(5687); -const node_fetch_1 = __importDefault(__nccwpck_require__(467)); -const querystring_1 = __importDefault(__nccwpck_require__(3477)); -const is_stream_1 = __importDefault(__nccwpck_require__(1554)); -const url_1 = __nccwpck_require__(7310); -const common_1 = __nccwpck_require__(6129); -const retry_1 = __nccwpck_require__(1052); -const stream_1 = __nccwpck_require__(2781); -const uuid_1 = __nccwpck_require__(5840); -const interceptor_1 = __nccwpck_require__(4309); -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fetch = hasFetch() ? window.fetch : node_fetch_1.default; -function hasWindow() { - return typeof window !== 'undefined' && !!window; -} -function hasFetch() { - return hasWindow() && !!window.fetch; -} -function hasBuffer() { - return typeof Buffer !== 'undefined'; -} -function hasHeader(options, header) { - return !!getHeader(options, header); -} -function getHeader(options, header) { - header = header.toLowerCase(); - for (const key of Object.keys((options === null || options === void 0 ? 
void 0 : options.headers) || {})) { - if (header === key.toLowerCase()) { - return options.headers[key]; - } - } - return undefined; -} -class Gaxios { - /** - * The Gaxios class is responsible for making HTTP requests. - * @param defaults The default set of options to be used for this instance. - */ - constructor(defaults) { - _Gaxios_instances.add(this); - this.agentCache = new Map(); - this.defaults = defaults || {}; - this.interceptors = { - request: new interceptor_1.GaxiosInterceptorManager(), - response: new interceptor_1.GaxiosInterceptorManager(), - }; - } - /** - * Perform an HTTP request with the given options. - * @param opts Set of HTTP options that will be used for this HTTP request. - */ - async request(opts = {}) { - opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); - opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); - return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); - } - async _defaultAdapter(opts) { - const fetchImpl = opts.fetchImplementation || fetch; - const res = (await fetchImpl(opts.url, opts)); - const data = await this.getResponseData(opts, res); - return this.translateResponse(opts, res, data); - } +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { /** - * Internal, retryable version of the `request` method. - * @param opts Set of HTTP options that will be used for this HTTP request. + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. */ - async _request(opts = {}) { - var _b; - try { - let translatedResponse; - if (opts.adapter) { - translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. 
+ if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. + if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; } else { - translatedResponse = await this._defaultAdapter(opts); - } - if (!opts.validateStatus(translatedResponse.status)) { - if (opts.responseType === 'stream') { - let response = ''; - await new Promise(resolve => { - (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { - response += chunk; - }); - (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); - }); - translatedResponse.data = response; + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); } - throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + this.subjectToken = responseJson.id_token; } - return translatedResponse; } - catch (e) { - const err = e instanceof common_1.GaxiosError - ? e - : new common_1.GaxiosError(e.message, opts, undefined, e); - const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); - if (shouldRetry && config) { - err.config.retryConfig.currentRetryAttempt = - config.retryConfig.currentRetryAttempt; - // The error's config could be redacted - therefore we only want to - // copy the retry state over to the existing config - opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; - return this._request(opts); + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); } - throw err; - } - } - async getResponseData(opts, res) { - switch (opts.responseType) { - case 'stream': - return res.body; - case 'json': { - let data = await res.text(); - try { - data = JSON.parse(data); - } - catch (_b) { - // continue - } - return data; + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); } - case 'arraybuffer': - return res.arrayBuffer(); - case 'blob': - return res.blob(); - case 'text': - return res.text(); - default: - return this.getResponseDataFromContentType(res); + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; } } /** - * By default, throw for any non-2xx status code - * @param status status code from the HTTP response + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. 
*/ - validateStatus(status) { - return status >= 200 && status < 300; + isValid() { + return !this.isExpired() && this.success; } /** - * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) - * @param params key value pars to encode + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. */ - paramsSerializer(params) { - return querystring_1.default.stringify(params); + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); } - translateResponse(opts, res, data) { - // headers need to be converted from a map to an obj - const headers = {}; - res.headers.forEach((value, key) => { - headers[key] = value; - }); - return { - config: opts, - data: data, - headers, - status: res.status, - statusText: res.statusText, - // XMLHttpRequestLike - request: { - responseURL: res.url, - }, - }; +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. + */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; + + +/***/ }), + +/***/ 38765: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(44627); +const oauth2common_1 = __nccwpck_require__(19510); +const gaxios_1 = __nccwpck_require__(59555); +const stream = __nccwpck_require__(12781); +const baseexternalclient_1 = __nccwpck_require__(40810); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { /** - * Attempts to parse a response by looking at the Content-Type header. - * @param {FetchResponse} response the HTTP response. - * @returns {Promise} a promise that resolves to the response data. + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. */ - async getResponseDataFromContentType(response) { - let contentType = response.headers.get('Content-Type'); - if (contentType === null) { - // Maintain existing functionality by calling text() - return response.text(); - } - contentType = contentType.toLowerCase(); - if (contentType.includes('application/json')) { - let data = await response.text(); - try { - data = JSON.parse(data); - } - catch (_b) { - // continue - } - return data; - } - else if (contentType.match(/^text\//)) { - return response.text(); - } - else { - // If the content type is something not easily handled, just return the raw data (blob) - return response.blob(); - } + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; } /** - * Creates an async generator that yields the pieces of a multipart/related request body. - * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive - * multipart/related requests are not currently supported. - * - * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. - * @param {string} boundary the boundary string to be placed between each part. + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. 
*/ - async *getMultipartRequest(multipartOptions, boundary) { - const finale = `--${boundary}--`; - for (const currentPart of multipartOptions) { - const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; - const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; - yield preamble; - if (typeof currentPart.content === 'string') { - yield currentPart.content; - } - else { - yield* currentPart.content; - } - yield '\r\n'; - } - yield finale; - } -} -exports.Gaxios = Gaxios; -_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { - var _b, _c; - const candidate = new url_1.URL(url); - const noProxyList = [...noProxy]; - const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; - for (const rule of noProxyEnvList) { - noProxyList.push(rule.trim()); - } - for (const rule of noProxyList) { - // Match regex - if (rule instanceof RegExp) { - if (rule.test(candidate.toString())) { - return false; - } - } - // Match URL - else if (rule instanceof url_1.URL) { - if (rule.origin === candidate.origin) { - return false; - } + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; } - // Match string regex - else if (rule.startsWith('*.') || rule.startsWith('.')) { - const cleanedRule = rule.replace(/^\*\./, '.'); - if (candidate.hostname.endsWith(cleanedRule)) { - return false; + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); } - } - // Basic string match - else if (rule === candidate.origin || - rule === candidate.hostname || - rule === candidate.href) { - return false; + // Request could fail before the server responds. + throw error; } } - return true; -}, _Gaxios_applyRequestInterceptors = +} /** - * Applies the request interceptors. The request interceptors are applied after the - * call to prepareRequest is completed. - * - * @param {GaxiosOptions} options The current set of options. - * - * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. 
*/ -async function _Gaxios_applyRequestInterceptors(options) { - let promiseChain = Promise.resolve(options); - for (const interceptor of this.interceptors.request.values()) { - if (interceptor) { - promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; } - } - return promiseChain; -}, _Gaxios_applyResponseInterceptors = -/** - * Applies the response interceptors. The response interceptors are applied after the - * call to request is made. - * - * @param {GaxiosOptions} options The current set of options. - * - * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. - */ -async function _Gaxios_applyResponseInterceptors(response) { - let promiseChain = Promise.resolve(response); - for (const interceptor of this.interceptors.response.values()) { - if (interceptor) { - promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; } - } - return promiseChain; -}, _Gaxios_prepareRequest = -/** - * Validates the options, merges them with defaults, and prepare request. - * - * @param options The original options passed from the client. 
- * @returns Prepared options, ready to make a request - */ -async function _Gaxios_prepareRequest(options) { - var _b, _c, _d, _e; - const opts = (0, extend_1.default)(true, {}, this.defaults, options); - if (!opts.url) { - throw new Error('URL is required.'); - } - // baseUrl has been deprecated, remove in 2.0 - const baseUrl = opts.baseUrl || opts.baseURL; - if (baseUrl) { - opts.url = baseUrl.toString() + opts.url; - } - opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; - if (opts.params && Object.keys(opts.params).length > 0) { - let additionalQueryParams = opts.paramsSerializer(opts.params); - if (additionalQueryParams.startsWith('?')) { - additionalQueryParams = additionalQueryParams.slice(1); + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; } - const prefix = opts.url.toString().includes('?') ? '&' : '?'; - opts.url = opts.url + prefix + additionalQueryParams; + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); } - if (typeof options.maxContentLength === 'number') { - opts.size = options.maxContentLength; + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; } - if (typeof options.maxRedirects === 'number') { - opts.follow = options.maxRedirects; + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); } - opts.headers = opts.headers || {}; - if (opts.multipart === undefined && opts.data) { - const isFormData = typeof FormData === 'undefined' - ? false - : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; - if (is_stream_1.default.readable(opts.data)) { - opts.body = opts.data; - } - else if (hasBuffer() && Buffer.isBuffer(opts.data)) { - // Do not attempt to JSON.stringify() a Buffer: - opts.body = opts.data; - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - } - else if (typeof opts.data === 'object') { - // If www-form-urlencoded content type has been set, but data is - // provided as an object, serialize the content using querystring: - if (!isFormData) { - if (getHeader(opts, 'content-type') === - 'application/x-www-form-urlencoded') { - opts.body = opts.paramsSerializer(opts.data); - } - else { - // } else if (!(opts.data instanceof FormData)) { - if (!hasHeader(opts, 'Content-Type')) { - opts.headers['Content-Type'] = 'application/json'; - } - opts.body = JSON.stringify(opts.data); - } - } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); } else { - opts.body = opts.data; + return this.requestAsync(opts); } } - else if (opts.multipart && opts.multipart.length > 0) { - // note: once the minimum version reaches Node 16, - // this can be replaced with randomUUID() function from crypto - // and the dependency on UUID removed - const boundary = (0, uuid_1.v4)(); - opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; - const bodyStream = new stream_1.PassThrough(); - opts.body = bodyStream; - (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); - } - opts.validateStatus = opts.validateStatus || this.validateStatus; - opts.responseType = opts.responseType || 'unknown'; - if (!opts.headers['Accept'] && opts.responseType === 'json') { - opts.headers['Accept'] = 'application/json'; - } - opts.method = opts.method || 'GET'; - const proxy = opts.proxy || - ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || - ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || - ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || - ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? void 0 : _e.http_proxy); - const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); - if (opts.agent) { - // don't do any of the following options - use the user-provided agent. - } - else if (proxy && urlMayUseProxy) { - const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); - if (this.agentCache.has(proxy)) { - opts.agent = this.agentCache.get(proxy); + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. 
+ */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); } - else { - opts.agent = new HttpsProxyAgent(proxy, { - cert: opts.cert, - key: opts.key, - }); - this.agentCache.set(proxy, opts.agent); + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; } + return response; } - else if (opts.cert && opts.key) { - // Configure client for mTLS - if (this.agentCache.has(opts.key)) { - opts.agent = this.agentCache.get(opts.key); - } - else { - opts.agent = new https_1.Agent({ - cert: opts.cert, - key: opts.key, - }); - this.agentCache.set(opts.key, opts.agent); + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; } + return this.cachedAccessToken; } - if (typeof opts.errorRedactor !== 'function' && - opts.errorRedactor !== false) { - opts.errorRedactor = common_1.defaultErrorRedactor; + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; } - return opts; -}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { - __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(7219)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); - return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); -}; -/** - * A cache for the lazily-loaded proxy agent. - * - * Should use {@link Gaxios[#getProxyAgent]} to retrieve. 
- */ -// using `import` to dynamically import the types here -_Gaxios_proxyAgent = { value: void 0 }; -//# sourceMappingURL=gaxios.js.map +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; + /***/ }), -/***/ 9555: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 94381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2021 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.request = exports.instance = exports.Gaxios = exports.GaxiosError = void 0; -const gaxios_1 = __nccwpck_require__(8133); -Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); -var common_1 = __nccwpck_require__(6129); -Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); -__exportStar(__nccwpck_require__(4309), exports); -/** - * The default instance used when the `request` method is directly - * invoked. - */ -exports.instance = new gaxios_1.Gaxios(); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const identitypoolclient_1 = __nccwpck_require__(20117); +const awsclient_1 = __nccwpck_require__(71569); +const pluggable_auth_client_1 = __nccwpck_require__(44782); /** - * Make an HTTP request using the given options. - * @param opts Options for the request + * Dummy class with no constructor. Developers are expected to use fromJSON. */ -async function request(opts) { - return exports.instance.request(opts); +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. 
+ * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } } -exports.request = request; -//# sourceMappingURL=index.js.map +exports.ExternalAccountClient = ExternalAccountClient; + /***/ }), -/***/ 4309: -/***/ ((__unused_webpack_module, exports) => { +/***/ 27646: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2024 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GaxiosInterceptorManager = void 0; +exports.FileSubjectTokenSupplier = void 0; +const util_1 = __nccwpck_require__(73837); +const fs = __nccwpck_require__(57147); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); /** - * Class to manage collections of GaxiosInterceptors for both requests and responses. + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} */ -class GaxiosInterceptorManager extends Set { +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. + * @param opts The file subject token supplier options to build the supplier + * with. 
+ */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + } + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } } -exports.GaxiosInterceptorManager = GaxiosInterceptorManager; -//# sourceMappingURL=interceptor.js.map +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; + /***/ }), -/***/ 1052: -/***/ ((__unused_webpack_module, exports) => { +/***/ 20695: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2018 Google LLC +// Copyright 2019 Google LLC +// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -23740,1206 +56135,854 @@ exports.GaxiosInterceptorManager = GaxiosInterceptorManager; // See the License for the specific language governing permissions and // limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRetryConfig = void 0; -async function getRetryConfig(err) { - var _a; - let config = getConfig(err); - if (!err || !err.config || (!config && !err.config.retry)) { - return { shouldRetry: false }; +exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +const gcpMetadata = __nccwpck_require__(3563); +const os = __nccwpck_require__(22037); +const path = __nccwpck_require__(71017); +const crypto_1 = __nccwpck_require__(78043); +const transporters_1 = __nccwpck_require__(72649); +const computeclient_1 = __nccwpck_require__(96875); +const idtokenclient_1 = __nccwpck_require__(80298); +const envDetect_1 = __nccwpck_require__(21380); +const jwtclient_1 = __nccwpck_require__(13959); +const refreshclient_1 = __nccwpck_require__(98790); +const impersonated_1 = __nccwpck_require__(91103); +const externalclient_1 = __nccwpck_require__(94381); +const baseexternalclient_1 = __nccwpck_require__(40810); +const authclient_1 = __nccwpck_require__(44627); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(38765); +const util_1 = __nccwpck_require__(68905); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +const GoogleAuthExceptionMessages = { + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisify unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; } - config = config || {}; - config.currentRetryAttempt = config.currentRetryAttempt || 0; - config.retry = - config.retry === undefined || config.retry === null ? 3 : config.retry; - config.httpMethodsToRetry = config.httpMethodsToRetry || [ - 'GET', - 'HEAD', - 'PUT', - 'OPTIONS', - 'DELETE', - ]; - config.noResponseRetries = - config.noResponseRetries === undefined || config.noResponseRetries === null - ? 2 - : config.noResponseRetries; - // If this wasn't in the list of status codes where we want - // to automatically retry, return. 
- const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 408 - Retry ("Request Timeout") - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [408, 408], - [429, 429], - [500, 599], - ]; - config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; - // Put the config back into the err - err.config.retryConfig = config; - // Determine if we should retry the request - const shouldRetryFn = config.shouldRetry || shouldRetryRequest; - if (!(await shouldRetryFn(err))) { - return { shouldRetry: false, config: err.config }; + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. + * + * @param opts + */ + constructor(opts) { + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + this.clientOptions = {}; + opts = opts || {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.jsonContent = opts.credentials || null; + this.clientOptions = opts.clientOptions || {}; + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } } - // Calculate time to wait with exponential backoff. - // If this is the first retry, look for a configured retryDelay. - const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; - // Formula: retryDelay + ((2^c - 1 / 2) * 1000) - const delay = retryDelay + ((Math.pow(2, config.currentRetryAttempt) - 1) / 2) * 1000; - // We're going to retry! Incremenent the counter. - err.config.retryConfig.currentRetryAttempt += 1; - // Create a promise that invokes the retry after the backOffDelay - const backoff = config.retryBackoff - ? config.retryBackoff(err, delay) - : new Promise(resolve => { - setTimeout(resolve, delay); - }); - // Notify the user if they added an `onRetryAttempt` handler - if (config.onRetryAttempt) { - config.onRetryAttempt(err); + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; } - // Return the promise in which recalls Gaxios to retry the request - await backoff; - return { shouldRetry: true, config: err.config }; -} -exports.getRetryConfig = getRetryConfig; -/** - * Determine based on config if we should retry the request. - * @param err The GaxiosError passed to the interceptor. 
- */ -function shouldRetryRequest(err) { - var _a; - const config = getConfig(err); - // node-fetch raises an AbortError if signaled: - // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal - if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { - return false; + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } } - // If there's no config, or retries are disabled, return. - if (!config || config.retry === 0) { - return false; + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } } - // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) - if (!err.response && - (config.currentRetryAttempt || 0) >= config.noResponseRetries) { - return false; + /* + * A private method for finding and caching a projectId. + * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } } - // Only retry with configured HttpMethods. - if (!err.config.method || - config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { - return false; + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? 
void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } } - // If this wasn't in the list of status codes where we want - // to automatically retry, return. - if (err.response && err.response.status) { - let isInRange = false; - for (const [min, max] of config.statusCodesToRetry) { - const status = err.response.status; - if (status >= min && status <= max) { - isInRange = true; - break; + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. + if (this.cachedCredential) { + return await this.prepareAndCacheADC(this.cachedCredential); + } + // Since this is a 'new' ADC to cache we will use the environment variable + // if it's available. We prefer this value over the value from ADC. + const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); } - if (!isInRange) { - return false; + // Look in the well-known credential file location. 
+ credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + // set universe domain for Compute client + if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) { + options.universeDomain = + await this.getUniverseDomainFromMetadataServer(); + } + options.scopes = this.getAnyScopes(); + return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); + } + throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); } - // If we are out of retry attempts, return - config.currentRetryAttempt = config.currentRetryAttempt || 0; - if (config.currentRetryAttempt >= config.retry) { - return false; - } - return true; -} -/** - * Acquire the raxConfig object from an GaxiosError if available. - * @param err The Gaxios error with a config object. - */ -function getConfig(err) { - if (err && err.config && err.config.retryConfig) { - return err.config.retryConfig; + async prepareAndCacheADC(credential, quotaProjectIdOverride) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; } - return; -} -//# sourceMappingURL=retry.js.map - -/***/ }), - -/***/ 1980: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2023 Google LLC -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.pkg = void 0; -exports.pkg = __nccwpck_require__(6318); -//# sourceMappingURL=util.js.map - -/***/ }), - -/***/ 1904: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.detectGCPResidency = exports.isGoogleComputeEngine = exports.isGoogleComputeEngineMACAddress = exports.isGoogleComputeEngineLinux = exports.isGoogleCloudServerless = exports.GCE_LINUX_BIOS_PATHS = void 0; -const fs_1 = __nccwpck_require__(7147); -const os_1 = __nccwpck_require__(2037); -/** - * Known paths unique to Google Compute Engine Linux instances - */ -exports.GCE_LINUX_BIOS_PATHS = { - BIOS_DATE: '/sys/class/dmi/id/bios_date', - BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', -}; -const GCE_MAC_ADDRESS_REGEX = /^42:01/; -/** - * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). - * - * Uses the: - * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. - * - * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. - */ -function isGoogleCloudServerless() { /** - * `CLOUD_RUN_JOB` is used for Cloud Run Jobs - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - * `FUNCTION_NAME` is used in older Cloud Functions environments: - * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. * - * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: - * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. - * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + * @returns A promise that resolves with the boolean. + * @api private */ - const isGFEnvironment = process.env.CLOUD_RUN_JOB || - process.env.FUNCTION_NAME || - process.env.K_SERVICE; - return !!isGFEnvironment; -} -exports.isGoogleCloudServerless = isGoogleCloudServerless; -/** - * Determines if the process is running on a Linux Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. - */ -function isGoogleComputeEngineLinux() { - if ((0, os_1.platform)() !== 'linux') - return false; - try { - // ensure this file exist - (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); - // ensure this file exist and matches - const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); - return /Google/.test(biosVendor); - } - catch (_a) { - return false; + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; } -} -exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; -/** - * Determines if the process is running on a Google Compute Engine instance with a known - * MAC address. - * - * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. 
- */ -function isGoogleComputeEngineMACAddress() { - const interfaces = (0, os_1.networkInterfaces)(); - for (const item of Object.values(interfaces)) { - if (!item) - continue; - for (const { mac } of item) { - if (GCE_MAC_ADDRESS_REGEX.test(mac)) { - return true; + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; } + throw e; } } - return false; -} -exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; -/** - * Determines if the process is running on a Google Compute Engine instance. - * - * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. - */ -function isGoogleComputeEngine() { - return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); -} -exports.isGoogleComputeEngine = isGoogleComputeEngine; -/** - * Determines if the process is running on Google Cloud Platform. - * - * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. - */ -function detectGCPResidency() { - return isGoogleCloudServerless() || isGoogleComputeEngine(); -} -exports.detectGCPResidency = detectGCPResidency; -//# sourceMappingURL=gcp-residency.js.map - -/***/ }), - -/***/ 3563: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.requestTimeout = exports.setGCPResidency = exports.getGCPResidency = exports.gcpResidencyCache = exports.resetIsAvailableCache = exports.isAvailable = exports.bulk = exports.universe = exports.project = exports.instance = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const jsonBigint = __nccwpck_require__(5031); -const gcp_residency_1 = __nccwpck_require__(1904); -exports.BASE_PATH = '/computeMetadata/v1'; -exports.HOST_ADDRESS = 'http://169.254.169.254'; -exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; -exports.HEADER_NAME = 'Metadata-Flavor'; -exports.HEADER_VALUE = 'Google'; -exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); -/** - * Metadata server detection override options. - * - * Available via `process.env.METADATA_SERVER_DETECTION`. - */ -exports.METADATA_SERVER_DETECTION = Object.freeze({ - 'assume-present': "don't try to ping the metadata server, but assume it's present", - none: "don't try to ping the metadata server, but don't try to use it either", - 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", - 'ping-only': 'skip the BIOS probe, and go straight to pinging', -}); -/** - * Returns the base URL while taking into account the GCE_METADATA_HOST - * environment variable if it exists. - * - * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. - */ -function getBaseUrl(baseUrl) { - if (!baseUrl) { - baseUrl = - process.env.GCE_METADATA_IP || - process.env.GCE_METADATA_HOST || - exports.HOST_ADDRESS; + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. + if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; } - // If no scheme is provided default to HTTP: - if (!/^https?:\/\//.test(baseUrl)) { - baseUrl = `http://${baseUrl}`; + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. 
+ * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); } - return new URL(exports.BASE_PATH, baseUrl).href; -} -// Accepts an options object passed from the user to the API. In previous -// versions of the API, it referred to a `Request` or an `Axios` request -// options object. Now it refers to an object with very limited property -// names. This is here to help ensure users don't pass invalid options when -// they upgrade from 0.4 to 0.5 to 0.8. -function validate(options) { - Object.keys(options).forEach(key => { - switch (key) { - case 'params': - case 'property': - case 'headers': - break; - case 'qs': - throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); - default: - throw new Error(`'${key}' is not a valid configuration option.`); + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d, _e; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); } - }); -} -async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { - let metadataKey = ''; - let params = {}; - let headers = {}; - if (typeof type === 'object') { - const metadataAccessor = type; - metadataKey = metadataAccessor.metadataKey; - params = metadataAccessor.params || params; - headers = metadataAccessor.headers || headers; - noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; - fastFail = metadataAccessor.fastFail || fastFail; + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + // Create source client for impersonation + const sourceClient = new refreshclient_1.UserRefreshClient(); + sourceClient.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. 
+ * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extract service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?<target>[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + const client = new impersonated_1.Impersonated({ + ...json, + delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [], + sourceClient: sourceClient, + targetPrincipal: targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + return client; } - else { - metadataKey = type; + /** + * Create a credentials instance using the given input options. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; } - if (typeof options === 'string') { - metadataKey += `/${options}`; + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. + this.jsonContent = json; + this.cachedCredential = client; + return client; } - else { - validate(options); - if (options.property) { - metadataKey += `/${options.property}`; + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; } - headers = options.headers || headers; - params = options.params || params; - } - try { - const requestMethod = fastFail ?
fastFailMetadataRequest : gaxios_1.request; - const res = await requestMethod({ - url: `${getBaseUrl()}/${metadataKey}`, - headers: { ...exports.HEADERS, ...headers }, - retryConfig: { noResponseRetries }, - params, - responseType: 'text', - timeout: requestTimeout(), - }); - // NOTE: node.js converts all incoming headers to lower case. - if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { - throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header.`); + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); } - else if (!res.data) { - throw new Error('Invalid response from the metadata service'); + else { + return this.fromStreamAsync(inputStream, options); } - if (typeof res.data === 'string') { - try { - return jsonBigint.parse(res.data); + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); } - catch (_a) { - /* ignore */ + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(s); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options) { + options = options || {}; + const client = new jwtclient_1.JWT(options); + client.fromAPIKey(apiKey); + return client; + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; } } - return res.data; + return false; } - catch (e) { - const err = e; - if (err.response && err.response.status !== 200) { - err.message = `Unsuccessful response status code. ${err.message}`; - } - throw e; + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); } -} -async function fastFailMetadataRequest(options) { - const secondaryOptions = { - ...options, - url: options.url.replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), - }; - // We race a connection between DNS/IP to metadata server. There are a couple - // reasons for this: - // - // 1. the DNS is slow in some GCP environments; by checking both, we might - // detect the runtime environment signficantly faster. - // 2. 
we can't just check the IP, which is tarpitted and slow to respond - // on a user's local machine. - // - // Additional logic has been added to make sure that we don't create an - // unhandled rejection in scenarios where a failure happens sometime - // after a success. - // - // Note, however, if a failure happens prior to a success, a rejection should - // occur, this is for folks running locally. - // - let responded = false; - const r1 = (0, gaxios_1.request)(options) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r2; + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; } - else { - responded = true; - throw err; + // Ensure the projectId is loaded from the keyFile if available. + if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } } - }); - const r2 = (0, gaxios_1.request)(secondaryOptions) - .then(res => { - responded = true; - return res; - }) - .catch(err => { - if (responded) { - return r1; + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; } else { - responded = true; - throw err; - } - }); - return Promise.race([r1, r2]); -} -/** - * Obtain metadata for the current GCE instance. - * - * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} - * - * @example - * ``` - * const serviceAccount: {} = await instance('service-accounts/'); - * const serviceAccountEmail: string = await instance('service-accounts/default/email'); - * ``` - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function instance(options) { - return metadataAccessor('instance', options); -} -exports.instance = instance; -/** - * Obtain metadata for the current GCP project. - * - * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} - * - * @example - * ``` - * const projectId: string = await project('project-id'); - * const numericProjectId: number = await project('numeric-project-id'); - * ``` - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function project(options) { - return metadataAccessor('project', options); -} -exports.project = project; -/** - * Obtain metadata for the current universe. - * - * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} - * - * @example - * ``` - * const universeDomain: string = await universe('universe_domain'); - * ``` - */ -function universe(options) { - return metadataAccessor('universe', options); -} -exports.universe = universe; -/** - * Retrieve metadata items in parallel. 
- * - * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} - * - * @example - * ``` - * const data = await bulk([ - * { - * metadataKey: 'instance', - * }, - * { - * metadataKey: 'project/project-id', - * }, - * ] as const); - * - * // data.instance; - * // data['project/project-id']; - * ``` - * - * @param properties The metadata properties to retrieve - * @returns The metadata in `metadatakey:value` format - */ -async function bulk(properties) { - const r = {}; - await Promise.all(properties.map(item => { - return (async () => { - const res = await metadataAccessor(item); - const key = item.metadataKey; - r[key] = res; - })(); - })); - return r; -} -exports.bulk = bulk; -/* - * How many times should we retry detecting GCP environment. - */ -function detectGCPAvailableRetries() { - return process.env.DETECT_GCP_RETRIES - ? Number(process.env.DETECT_GCP_RETRIES) - : 0; -} -let cachedIsAvailableResponse; -/** - * Determine if the metadata server is currently available. - */ -async function isAvailable() { - if (process.env.METADATA_SERVER_DETECTION) { - const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); - if (!(value in exports.METADATA_SERVER_DETECTION)) { - throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); - } - switch (value) { - case 'assume-present': - return true; - case 'none': - return false; - case 'bios-only': - return getGCPResidency(); - case 'ping-only': - // continue, we want to ping the server + return null; } } - try { - // If a user is instantiating several GCP libraries at the same time, - // this may result in multiple calls to isAvailable(), to detect the - // runtime environment. We use the same promise for each of these calls - // to reduce the network load. - if (cachedIsAvailableResponse === undefined) { - cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), - // If the default HOST_ADDRESS has been overridden, we should not - // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in - // a non-GCP environment): - !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; } - await cachedIsAvailableResponse; - return true; + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); } - catch (e) { - const err = e; - if (process.env.DEBUG_AUTH) { - console.info(err); + /** + * Gets the Compute Engine project ID if it can be inferred. 
+ */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; } - if (err.type === 'request-timeout') { - // If running in a GCP environment, metadata endpoint should return - // within ms. - return false; + catch (e) { + // Ignore any errors + return null; } - if (err.response && err.response.status === 404) { - return false; + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); } else { - if (!(err.response && err.response.status === 404) && - // A warning is emitted if we see an unexpected err.code, or err.code - // is not populated: - (!err.code || - ![ - 'EHOSTDOWN', - 'EHOSTUNREACH', - 'ENETUNREACH', - 'ENOENT', - 'ENOTFOUND', - 'ECONNREFUSED', - ].includes(err.code))) { - let code = 'UNKNOWN'; - if (err.code) - code = err.code; - process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); - } - // Failure to resolve the metadata service means that it is not available. - return false; + return this.getCredentialsAsync(); } } -} -exports.isAvailable = isAvailable; -/** - * reset the memoized isAvailable() lookup. - */ -function resetIsAvailableCache() { - cachedIsAvailableResponse = undefined; -} -exports.resetIsAvailableCache = resetIsAvailableCache; -/** - * A cache for the detected GCP Residency. - */ -exports.gcpResidencyCache = null; -/** - * Detects GCP Residency. - * Caches results to reduce costs for subsequent calls. - * - * @see setGCPResidency for setting - */ -function getGCPResidency() { - if (exports.gcpResidencyCache === null) { - setGCPResidency(); - } - return exports.gcpResidencyCache; -} -exports.getGCPResidency = getGCPResidency; -/** - * Sets the detected GCP Residency. - * Useful for forcing metadata server detection behavior. - * - * Set `null` to autodetect the environment (default behavior). - * @see getGCPResidency for getting - */ -function setGCPResidency(value = null) { - exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); -} -exports.setGCPResidency = setGCPResidency; -/** - * Obtain the timeout for requests to the metadata server. - * - * In certain environments and conditions requests can take longer than - * the default timeout to complete. This function will determine the - * appropriate timeout based on the environment. - * - * @returns {number} a request timeout duration in milliseconds. - */ -function requestTimeout() { - return getGCPResidency() ? 0 : 3000; -} -exports.requestTimeout = requestTimeout; -__exportStar(__nccwpck_require__(1904), exports); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 4627: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2012 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; -const events_1 = __nccwpck_require__(2361); -const gaxios_1 = __nccwpck_require__(9555); -const transporters_1 = __nccwpck_require__(2649); -const util_1 = __nccwpck_require__(8905); -/** - * The default cloud universe - * - * @see {@link AuthJSONOptions.universe_domain} - */ -exports.DEFAULT_UNIVERSE = 'googleapis.com'; -/** - * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} - */ -exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; -class AuthClient extends events_1.EventEmitter { - constructor(opts = {}) { - var _a, _b, _c, _d, _e; - super(); - this.credentials = {}; - this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; - this.forceRefreshOnFailure = false; - this.universeDomain = exports.DEFAULT_UNIVERSE; - const options = (0, util_1.originalOrCamelOptions)(opts); - // Shared auth options - this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; - this.quotaProjectId = options.get('quota_project_id'); - this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; - this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; - // Shared client options - this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); - if (opts.transporterOptions) { - this.transporter.defaults = opts.transporterOptions; + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; } - if (opts.eagerRefreshThresholdMillis) { - this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. + */ + async getClient() { + if (!this.cachedCredential) { + if (this.jsonContent) { + this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + await this.fromStreamAsync(stream, this.clientOptions); + } + else { + await this.getApplicationDefaultAsync(this.clientOptions); + } } - this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + return this.cachedCredential; } /** - * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. 
- * - * @expiremental + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. */ - get gaxios() { - if (this.transporter instanceof gaxios_1.Gaxios) { - return this.transporter; - } - else if (this.transporter instanceof transporters_1.DefaultTransporter) { - return this.transporter.instance; - } - else if ('instance' in this.transporter && - this.transporter.instance instanceof gaxios_1.Gaxios) { - return this.transporter.instance; + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); } - return null; + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } /** - * Sets the auth credentials. + * Automatically obtain application default credentials, and return + * an access token for making requests. */ - setCredentials(credentials) { - this.credentials = credentials; + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; } /** - * Append additional headers, e.g., x-goog-user-project, shared across the - * classes inheriting AuthClient. This method should be used by any method - * that overrides getRequestMetadataAsync(), which is a shared helper for - * setting request information in both gRPC and HTTP API calls. - * - * @param headers object to append additional headers to. + * Obtain the HTTP headers that will provide authorization for a given + * request. */ - addSharedMetadataHeaders(headers) { - // quota_project_id, stored in application_default_credentials.json, is set in - // the x-goog-user-project header, to indicate an alternate account for - // billing and quota: - if (!headers['x-goog-user-project'] && // don't override a value the user sets. - this.quotaProjectId) { - headers['x-goog-user-project'] = this.quotaProjectId; - } - return headers; + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); } /** - * Retry config for Auth-related requests. - * - * @remarks - * - * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} - * config as some downstream APIs would prefer if customers explicitly enable retries, - * such as GCS. + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers */ - static get RETRY_CONFIG() { - return { - retry: true, - retryConfig: { - httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], - }, - }; + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; } -} -exports.AuthClient = AuthClient; - - -/***/ }), - -/***/ 1569: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsClient = void 0; -const awsrequestsigner_1 = __nccwpck_require__(1754); -const baseexternalclient_1 = __nccwpck_require__(7391); -const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(9799); -const util_1 = __nccwpck_require__(8905); -/** - * AWS external account client. This is used for AWS workloads, where - * AWS STS GetCallerIdentity serialized signed requests are exchanged for - * GCP access token. - */ -class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Instantiates an AwsClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid AWS credential. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions **DEPRECATED, all options are available in the - * `options` parameter.** Optional additional behavior customization options. - * These currently customize expiration threshold time and whether to retry - * on 401/403 API request errors. + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - const opts = (0, util_1.originalOrCamelOptions)(options); - const credentialSource = opts.get('credential_source'); - const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); - // Validate credential sourcing configuration. 
- if (!credentialSource && !awsSecurityCredentialsSupplier) { - throw new Error('A credential source or AWS security credentials supplier must be specified.'); - } - if (credentialSource && awsSecurityCredentialsSupplier) { - throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); - } - if (awsSecurityCredentialsSupplier) { - this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; - this.regionalCredVerificationUrl = - __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); - this.credentialSourceType = 'programmatic'; - } - else { - const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); - this.environmentId = credentialSourceOpts.get('environment_id'); - // This is only required if the AWS region is not available in the - // AWS_REGION or AWS_DEFAULT_REGION environment variables. - const regionUrl = credentialSourceOpts.get('region_url'); - // This is only required if AWS security credentials are not available in - // environment variables. - const securityCredentialsUrl = credentialSourceOpts.get('url'); - const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); - this.awsSecurityCredentialsSupplier = - new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ - regionUrl: regionUrl, - securityCredentialsUrl: securityCredentialsUrl, - imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, - }); - this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); - this.credentialSourceType = 'aws'; - // Data validators. - this.validateEnvironmentId(); - } - this.awsRequestSigner = null; - this.region = ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); } - validateEnvironmentId() { - var _b; - const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); - if (!match || !this.regionalCredVerificationUrl) { - throw new Error('No valid AWS "credential_source" provided'); - } - else if (parseInt(match[2], 10) !== 1) { - throw new Error(`aws version "${match[2]}" is not supported in the current build.`); - } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); } /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. This will call the - * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS - * Security Credentials, then use them to create a signed AWS STS request that - * can be exchanged for a GCP access token. - * @return A promise that resolves with the external subject token. + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. + * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` */ - async retrieveSubjectToken() { - // Initialize AWS request signer if not already initialized. 
- if (!this.awsRequestSigner) { - this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); - this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { - return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); - }, this.region); + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; } - // Generate signed request to AWS STS GetCallerIdentity API. - // Use the required regional endpoint. Otherwise, the request will fail. - const options = await this.awsRequestSigner.getRequestOptions({ - ..._a.RETRY_CONFIG, - url: this.regionalCredVerificationUrl.replace('{region}', this.region), + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, }); - // The GCP STS endpoint expects the headers to be formatted as: - // [ - // {key: 'x-amz-date', value: '...'}, - // {key: 'Authorization', value: '...'}, - // ... - // ] - // And then serialized as: - // encodeURIComponent(JSON.stringify({ - // url: '...', - // method: 'POST', - // headers: [{key: 'x-amz-date', value: '...'}, ...] - // })) - const reformattedHeader = []; - const extendedHeaders = Object.assign({ - // The full, canonical resource name of the workload identity pool - // provider, with or without the HTTPS prefix. - // Including this header as part of the signature is recommended to - // ensure data integrity. - 'x-goog-cloud-target-resource': this.audience, - }, options.headers); - // Reformat header to GCP STS expected format. - for (const key in extendedHeaders) { - reformattedHeader.push({ - key, - value: extendedHeaders[key], - }); - } - // Serialize the reformatted signed request. - return encodeURIComponent(JSON.stringify({ - url: options.url, - method: options.method, - headers: reformattedHeader, - })); + return res.data.signedBlob; } } -exports.AwsClient = AwsClient; -_a = AwsClient; -_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; -/** - * @deprecated AWS client no validates the EC2 metadata address. - **/ -AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +exports.GoogleAuth = GoogleAuth; /** - * @deprecated AWS client no validates the EC2 metadata address. - **/ -AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; + * Export DefaultTransporter as a static property of the class. 
+ */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; /***/ }), -/***/ 1754: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 39735: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.AwsRequestSigner = void 0; -const crypto_1 = __nccwpck_require__(8043); -/** AWS Signature Version 4 signing algorithm identifier. */ -const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; -/** - * The termination string for the AWS credential scope value as defined in - * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - */ -const AWS_REQUEST_TYPE = 'aws4_request'; -/** - * Implements an AWS API request signer based on the AWS Signature Version 4 - * signing process. - * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html - */ -class AwsRequestSigner { - /** - * Instantiates an AWS API request signer used to send authenticated signed - * requests to AWS APIs based on the AWS Signature Version 4 signing process. - * This also provides a mechanism to generate the signed request without - * sending it. - * @param getCredentials A mechanism to retrieve AWS security credentials - * when needed. - * @param region The AWS region to use. - */ - constructor(getCredentials, region) { - this.getCredentials = getCredentials; - this.region = region; - this.crypto = (0, crypto_1.createCrypto)(); - } - /** - * Generates the signed request for the provided HTTP request for calling - * an AWS API. This follows the steps described at: - * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html - * @param amzOptions The AWS request options that need to be signed. - * @return A promise that resolves with the GaxiosOptions containing the - * signed HTTP request parameters. - */ - async getRequestOptions(amzOptions) { - if (!amzOptions.url) { - throw new Error('"url" is required in "amzOptions"'); - } - // Stringify JSON requests. This will be set in the request body of the - // generated signed request. - const requestPayloadData = typeof amzOptions.data === 'object' - ? JSON.stringify(amzOptions.data) - : amzOptions.data; - const url = amzOptions.url; - const method = amzOptions.method || 'GET'; - const requestPayload = amzOptions.body || requestPayloadData; - const additionalAmzHeaders = amzOptions.headers; - const awsSecurityCredentials = await this.getCredentials(); - const uri = new URL(url); - const headerMap = await generateAuthenticationHeaderMap({ - crypto: this.crypto, - host: uri.host, - canonicalUri: uri.pathname, - canonicalQuerystring: uri.search.substr(1), - method, - region: this.region, - securityCredentials: awsSecurityCredentials, - requestPayload, - additionalAmzHeaders, - }); - // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. - const headers = Object.assign( - // Add x-amz-date if available. - headerMap.amzDate ? 
{ 'x-amz-date': headerMap.amzDate } : {}, { - Authorization: headerMap.authorizationHeader, - host: uri.host, - }, additionalAmzHeaders || {}); - if (awsSecurityCredentials.token) { - Object.assign(headers, { - 'x-amz-security-token': awsSecurityCredentials.token, - }); - } - const awsSignedReq = { - url, - method: method, - headers, - }; - if (typeof requestPayload !== 'undefined') { - awsSignedReq.body = requestPayload; - } - return awsSignedReq; - } -} -exports.AwsRequestSigner = AwsRequestSigner; -/** - * Creates the HMAC-SHA256 hash of the provided message using the - * provided key. - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The HMAC-SHA256 key to use. - * @param msg The message to hash. - * @return The computed hash bytes. - */ -async function sign(crypto, key, msg) { - return await crypto.signWithHmacSha256(key, msg); -} -/** - * Calculates the signing key used to calculate the signature for - * AWS Signature Version 4 based on: - * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - * - * @param crypto The crypto instance used to facilitate cryptographic - * operations. - * @param key The AWS secret access key. - * @param dateStamp The '%Y%m%d' date format. - * @param region The AWS region. - * @param serviceName The AWS service name, eg. sts. - * @return The signing key bytes. - */ -async function getSigningKey(crypto, key, dateStamp, region, serviceName) { - const kDate = await sign(crypto, `AWS4${key}`, dateStamp); - const kRegion = await sign(crypto, kDate, region); - const kService = await sign(crypto, kRegion, serviceName); - const kSigning = await sign(crypto, kService, 'aws4_request'); - return kSigning; -} -/** - * Generates the authentication header map needed for generating the AWS - * Signature Version 4 signed request. - * - * @param option The options needed to compute the authentication header map. - * @return The AWS authentication header map which constitutes of the following - * components: amz-date, authorization header and canonical query string. - */ -async function generateAuthenticationHeaderMap(options) { - const additionalAmzHeaders = options.additionalAmzHeaders || {}; - const requestPayload = options.requestPayload || ''; - // iam.amazonaws.com host => iam service. - // sts.us-east-2.amazonaws.com => sts service. - const serviceName = options.host.split('.')[0]; - const now = new Date(); - // Format: '%Y%m%dT%H%M%SZ'. - const amzDate = now - .toISOString() - .replace(/[-:]/g, '') - .replace(/\.[0-9]+/, ''); - // Format: '%Y%m%d'. - const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); - // Change all additional headers to be lower case. - const reformattedAdditionalAmzHeaders = {}; - Object.keys(additionalAmzHeaders).forEach(key => { - reformattedAdditionalAmzHeaders[key.toLowerCase()] = - additionalAmzHeaders[key]; - }); - // Add AWS token if available. - if (options.securityCredentials.token) { - reformattedAdditionalAmzHeaders['x-amz-security-token'] = - options.securityCredentials.token; - } - // Header keys need to be sorted alphabetically. - const amzHeaders = Object.assign({ - host: options.host, - }, - // Previously the date was not fixed with x-amz- and could be provided manually. - // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req - reformattedAdditionalAmzHeaders.date ? 
{} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); - let canonicalHeaders = ''; - const signedHeadersList = Object.keys(amzHeaders).sort(); - signedHeadersList.forEach(key => { - canonicalHeaders += `${key}:${amzHeaders[key]}\n`; - }); - const signedHeaders = signedHeadersList.join(';'); - const payloadHash = await options.crypto.sha256DigestHex(requestPayload); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html - const canonicalRequest = `${options.method}\n` + - `${options.canonicalUri}\n` + - `${options.canonicalQuerystring}\n` + - `${canonicalHeaders}\n` + - `${signedHeaders}\n` + - `${payloadHash}`; - const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; - // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html - const stringToSign = `${AWS_ALGORITHM}\n` + - `${amzDate}\n` + - `${credentialScope}\n` + - (await options.crypto.sha256DigestHex(canonicalRequest)); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html - const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); - const signature = await sign(options.crypto, signingKey, stringToSign); - // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html - const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + - `${credentialScope}, SignedHeaders=${signedHeaders}, ` + - `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; - return { - // Do not return x-amz-date if date is available. - amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, - authorizationHeader, - canonicalQuerystring: options.canonicalQuerystring, - }; +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. + * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } } +exports.IAMAuth = IAMAuth; /***/ }), -/***/ 7391: +/***/ 20117: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -24958,64 +57001,22 @@ async function generateAuthenticationHeaderMap(options) { // See the License for the specific language governing permissions and // limitations under the License. 
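// A minimal usage sketch for the IAMAuth helper defined above (assuming it is
// re-exported from the package entry point, as in upstream google-auth-library);
// the selector and token values below are hypothetical placeholders:
//
//   const {IAMAuth} = require('google-auth-library');
//   const iam = new IAMAuth('example-authority-selector', 'example-token');
//   // Returns the two IAM headers to attach to outgoing requests:
//   // { 'x-goog-iam-authority-selector': ..., 'x-goog-iam-authorization-token': ... }
//   const headers = iam.getRequestHeaders();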
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; -const stream = __nccwpck_require__(2781); -const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -const util_1 = __nccwpck_require__(8905); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** The default OAuth scope to request when none is provided. */ -const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; -/** Default impersonated token lifespan in seconds.*/ -const DEFAULT_TOKEN_LIFESPAN = 3600; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * The credentials JSON file type for external account clients. - * There are 3 types of JSON configs: - * 1. authorized_user => Google end user credential - * 2. service_account => Google service account credential - * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) - */ -exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; -/** - * Cloud resource manager URL used to retrieve project information. - * - * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead - **/ -exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; -/** The workforce audience pattern. */ -const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; -const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const pkg = __nccwpck_require__(1402); -/** - * For backwards compatibility. - */ -var authclient_2 = __nccwpck_require__(4627); -Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const util_1 = __nccwpck_require__(68905); +const filesubjecttokensupplier_1 = __nccwpck_require__(27646); +const urlsubjecttokensupplier_1 = __nccwpck_require__(7428); /** - * Base external account client. This is used to instantiate AuthClients for - * exchanging external account credentials for GCP access token and authorizing - * requests to GCP APIs. - * The base class implements common logic for exchanging various type of - * external credentials for GCP access token. The logic of determining and - * retrieving the external credential based on the environment and - * credential_source will be left for the subclasses. + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. */ -class BaseExternalAccountClient extends authclient_1.AuthClient { +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Instantiate a BaseExternalAccountClient instance using the provided JSON + * Instantiate an IdentityPoolClient instance using the provided JSON * object loaded from an external account credentials file. 
+ * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. * @param options The external account options object typically loaded * from the external account JSON credential file. The camelCased options * are aliases for the snake_cased options. @@ -25025,499 +57026,327 @@ class BaseExternalAccountClient extends authclient_1.AuthClient { * on 401/403 API request errors. */ constructor(options, additionalOptions) { - var _a; - super({ ...options, ...additionalOptions }); + super(options, additionalOptions); const opts = (0, util_1.originalOrCamelOptions)(options); - const type = opts.get('type'); - if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { - throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + - `received "${options.type}"`); - } - const clientId = opts.get('client_id'); - const clientSecret = opts.get('client_secret'); - const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); - const subjectTokenType = opts.get('subject_token_type'); - const workforcePoolUserProject = opts.get('workforce_pool_user_project'); - const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); - const serviceAccountImpersonation = opts.get('service_account_impersonation'); - const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); - this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || - `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); - if (clientId) { - this.clientAuth = { - confidentialClientType: 'basic', - clientId, - clientSecret, - }; + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. 
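+        // Exactly one sourcing mechanism must be configured: either a declarative
+        // credential_source (file- or url-based) or a programmatic
+        // subject_token_supplier, but not both and not neither.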
+ if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); } - this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); - this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; - this.cachedAccessToken = null; - this.audience = opts.get('audience'); - this.subjectTokenType = subjectTokenType; - this.workforcePoolUserProject = workforcePoolUserProject; - const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); - if (this.workforcePoolUserProject && - !this.audience.match(workforceAudiencePattern)) { - throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + - 'credentials.'); + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); } - this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; - this.serviceAccountImpersonationLifetime = - serviceAccountImpersonationLifetime; - if (this.serviceAccountImpersonationLifetime) { - this.configLifetimeRequested = true; + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; } else { - this.configLifetimeRequested = false; - this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; - } - this.projectNumber = this.getProjectNumber(this.audience); - this.supplierContext = { - audience: this.audience, - subjectTokenType: this.subjectTokenType, - transporter: this.transporter, - }; - } - /** The service account email to be impersonated, if available. */ - getServiceAccountEmail() { - var _a; - if (this.serviceAccountImpersonationUrl) { - if (this.serviceAccountImpersonationUrl.length > 256) { - /** - * Prevents DOS attacks. - * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} - **/ - throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. + const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); } - // Parse email from URL. The formal looks as follows: - // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken - const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; - const result = re.exec(this.serviceAccountImpersonationUrl); - return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; - } - return null; - } - /** - * Provides a mechanism to inject GCP access tokens directly. - * When the provided credential expires, a new credential, using the - * external account options, is retrieved. - * @param credentials The Credentials object to set on the current client. - */ - setCredentials(credentials) { - super.setCredentials(credentials); - this.cachedAccessToken = credentials; - } - /** - * @return A promise that resolves with the current GCP access token - * response. If the current credential is expired, a new one is retrieved. 
- */ - async getAccessToken() { - // If cached access token is unavailable or expired, force refresh. - if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { - await this.refreshAccessTokenAsync(); - } - // Return GCP access token in GetAccessTokenResponse format. - return { - token: this.cachedAccessToken.access_token, - res: this.cachedAccessToken.res, - }; - } - /** - * The main authentication interface. It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * The result has the form: - * { Authorization: 'Bearer ' } - */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); - const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, - }; - return this.addSharedMetadataHeaders(headers); - } - request(opts, callback) { - if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); - } - else { - return this.requestAsync(opts); - } - } - /** - * @return A promise that resolves with the project ID corresponding to the - * current workload identity pool or current workforce pool if - * determinable. For workforce pool credential, it returns the project ID - * corresponding to the workforcePoolUserProject. - * This is introduced to match the current pattern of using the Auth - * library: - * const projectId = await auth.getProjectId(); - * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; - * const res = await client.request({ url }); - * The resource may not have permission - * (resourcemanager.projects.get) to call this API or the required - * scopes may not be selected: - * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes - */ - async getProjectId() { - const projectNumber = this.projectNumber || this.workforcePoolUserProject; - if (this.projectId) { - // Return previously determined project ID. - return this.projectId; - } - else if (projectNumber) { - // Preferable not to use request() to avoid retrial policies. - const headers = await this.getRequestHeaders(); - const response = await this.transporter.request({ - ...BaseExternalAccountClient.RETRY_CONFIG, - headers, - url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, - responseType: 'json', - }); - this.projectId = response.data.projectId; - return this.projectId; - } - return null; - } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. - * @return A promise that resolves with the successful response. 
- */ - async requestAsync(opts, reAuthRetried = false) { - let response; - try { - const requestHeaders = await this.getRequestHeaders(); - opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } - response = await this.transporter.request(opts); - } - catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - forceRefreshOnFailure is true - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!reAuthRetried && - isAuthErr && - !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); - } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } - throw e; - } - return response; - } - /** - * Forces token refresh, even if unexpired tokens are currently cached. - * External credentials are exchanged for GCP access tokens via the token - * exchange endpoint and other settings provided in the client options - * object. - * If the service_account_impersonation_url is provided, an additional - * step to exchange the external account GCP access token for a service - * account impersonated token is performed. - * @return A promise that resolves with the fresh GCP access tokens. - */ - async refreshAccessTokenAsync() { - // Retrieve the external credential. - const subjectToken = await this.retrieveSubjectToken(); - // Construct the STS credentials options. - const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - audience: this.audience, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken, - subjectTokenType: this.subjectTokenType, - // generateAccessToken requires the provided access token to have - // scopes: - // https://www.googleapis.com/auth/iam or - // https://www.googleapis.com/auth/cloud-platform - // The new service account access token scopes will match the user - // provided ones. - scope: this.serviceAccountImpersonationUrl - ? 
[DEFAULT_OAUTH_SCOPE] - : this.getScopesArray(), - }; - // Exchange the external credentials for a GCP access token. - // Client auth is prioritized over passing the workforcePoolUserProject - // parameter for STS token exchange. - const additionalOptions = !this.clientAuth && this.workforcePoolUserProject - ? { userProject: this.workforcePoolUserProject } - : undefined; - const additionalHeaders = { - 'x-goog-api-client': this.getMetricsHeaderValue(), - }; - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); - if (this.serviceAccountImpersonationUrl) { - this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); - } - else if (stsResponse.expires_in) { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, - res: stsResponse.res, - }; - } - else { - // Save response in cached access token. - this.cachedAccessToken = { - access_token: stsResponse.access_token, - res: stsResponse.res, - }; - } - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. - this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedAccessToken.expiry_date, - access_token: this.cachedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, - }); - // Return the cached access token. - return this.cachedAccessToken; - } - /** - * Returns the workload identity pool project number if it is determinable - * from the audience resource name. - * @param audience The STS audience used to determine the project number. - * @return The project number associated with the workload identity pool, if - * this can be determined from the STS audience field. Otherwise, null is - * returned. - */ - getProjectNumber(audience) { - // STS audience pattern: - // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... - const match = audience.match(/\/projects\/([^/]+)/); - if (!match) { - return null; } - return match[1]; } /** - * Exchanges an external account GCP access token for a service - * account impersonated access token using iamcredentials - * GenerateAccessToken API. - * @param token The access token to exchange for a service account access - * token. - * @return A promise that resolves with the service account impersonated - * credentials response. + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier} + * @return A promise that resolves with the external subject token. */ - async getImpersonatedAccessToken(token) { - const opts = { - ...BaseExternalAccountClient.RETRY_CONFIG, - url: this.serviceAccountImpersonationUrl, - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - data: { - scope: this.getScopesArray(), - lifetime: this.serviceAccountImpersonationLifetime + 's', - }, - responseType: 'json', - }; - const response = await this.transporter.request(opts); - const successResponse = response.data; - return { - access_token: successResponse.accessToken, - // Convert from ISO format to timestamp. 
- expiry_date: new Date(successResponse.expireTime).getTime(), - res: response, - }; + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); } +} +exports.IdentityPoolClient = IdentityPoolClient; + + +/***/ }), + +/***/ 80298: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +class IdTokenClient extends oauth2client_1.OAuth2Client { /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param accessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. + * Google ID Token client + * + * Retrieve ID token from the metadata server. + * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server */ - isExpired(accessToken) { - const now = new Date().getTime(); - return accessToken.expiry_date - ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; } - /** - * @return The list of scopes for the requested GCP access token. - */ - getScopesArray() { - // Since scopes can be provided as string or array, the type should - // be normalized. - if (typeof this.scopes === 'string') { - return [this.scopes]; + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; } - return this.scopes || [DEFAULT_OAUTH_SCOPE]; + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; } - getMetricsHeaderValue() { - const nodeVersion = process.version.replace(/^v/, ''); - const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; - const credentialSourceType = this.credentialSourceType - ? 
this.credentialSourceType - : 'unknown'; - return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } } } -exports.BaseExternalAccountClient = BaseExternalAccountClient; +exports.IdTokenClient = IdTokenClient; /***/ }), -/***/ 6875: +/***/ 91103: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2013 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Compute = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const gcpMetadata = __nccwpck_require__(3563); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; const oauth2client_1 = __nccwpck_require__(3936); -class Compute extends oauth2client_1.OAuth2Client { +const gaxios_1 = __nccwpck_require__(59555); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { /** - * Google Compute Engine service account credentials. + * Impersonated service account credentials. * - * Retrieve access token from the metadata server. - * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. 
If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. + * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. */ constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; super(options); // Start with an expired refresh token, which will automatically be // refreshed before the first API call is made. - this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; - this.serviceAccountEmail = options.serviceAccountEmail || 'default'; - this.scopes = Array.isArray(options.scopes) - ? options.scopes - : options.scopes - ? [options.scopes] - : []; + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; } /** - * Refreshes the access token. - * @param refreshToken Unused parameter + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. 
+ * @return denoting the keyyID and signedBlob in base64 string */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; - let data; - try { - const instanceOptions = { - property: tokenPath, - }; - if (this.scopes.length > 0) { - instanceOptions.params = { - scopes: this.scopes.join(','), - }; - } - data = await gcpMetadata.instance(instanceOptions); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError) { - e.message = `Could not refresh access token: ${e.message}`; - this.wrapError(e); - } - throw e; - } - const tokens = data; - if (data && data.expires_in) { - tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res: null }; + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; } /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. + * Refreshes the access token. */ - async fetchIdToken(targetAudience) { - const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + - `?format=full&audience=${targetAudience}`; - let idToken; + async refreshToken() { + var _a, _b, _c, _d, _e, _f; try { - const instanceOptions = { - property: idTokenPath, + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, }; - idToken = await gcpMetadata.instance(instanceOptions); } - catch (e) { - if (e instanceof Error) { - e.message = `Could not fetch ID token: ${e.message}`; + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? 
void 0 : _f.message; } - throw e; - } - return idToken; - } - wrapError(e) { - const res = e.response; - if (res && res.status) { - e.status = res.status; - if (res.status === 403) { - e.message = - 'A Forbidden error was returned while attempting to retrieve an access ' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have the correct permission scopes specified: ' + - e.message; + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; } - else if (res.status === 404) { - e.message = - 'A Not Found error was returned while attempting to retrieve an access' + - 'token for the Compute Engine built-in service account. This may be because the Compute ' + - 'Engine instance does not have any permission scopes specified: ' + - e.message; + else { + error.message = `unable to impersonate: ${error}`; + throw error; } } } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } } -exports.Compute = Compute; +exports.Impersonated = Impersonated; /***/ }), -/***/ 9799: -/***/ (function(__unused_webpack_module, exports) { +/***/ 68740: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2024 Google LLC +// Copyright 2015 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -25530,198 +57359,194 @@ exports.Compute = Compute; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultAwsSecurityCredentialsSupplier = void 0; -/** - * Internal AWS security credentials supplier implementation used by {@link AwsClient} - * when a credential source is provided instead of a user defined supplier. - * The logic is summarized as: - * 1. If imdsv2_session_token_url is provided in the credential source, then - * fetch the aws session token and include it in the headers of the - * metadata requests. This is a requirement for IDMSv2 but optional - * for IDMSv1. - * 2. Retrieve AWS region from availability-zone. - * 3a. Check AWS credentials in environment variables. If not found, get - * from security-credentials endpoint. - * 3b. Get AWS credentials from security-credentials endpoint. In order - * to retrieve this, the AWS role needs to be determined by calling - * security-credentials endpoint without any argument. Then the - * credentials can be retrieved via: security-credentials/role_name - * 4. Generate the signed request to AWS STS GetCallerIdentity action. - * 5. Inject x-goog-cloud-target-resource into header and serialize the - * signed request. This will be the subject-token to pass to GCP STS. - */ -class DefaultAwsSecurityCredentialsSupplier { +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(4636); +const util_1 = __nccwpck_require__(68905); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { /** - * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information - * from the credential_source stored in the ADC file. - * @param opts The default aws security credentials supplier options object to - * build the supplier with. + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. */ - constructor(opts) { - _DefaultAwsSecurityCredentialsSupplier_instances.add(this); - this.regionUrl = opts.regionUrl; - this.securityCredentialsUrl = opts.securityCredentialsUrl; - this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; - this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; } /** - * Returns the active AWS region. This first checks to see if the region - * is available as an environment variable. If it is not, then the supplier - * will call the region URL. - * @param context {@link ExternalAccountSupplierContext} from the calling - * {@link AwsClient}, contains the requested audience and subject token type - * for the external account identity. 
- * @return A promise that resolves with the AWS region string. + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. */ - async getAwsRegion(context) { - // Priority order for region determination: - // AWS_REGION > AWS_DEFAULT_REGION > metadata server. - if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { - return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; } - const metadataHeaders = {}; - if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { - metadataHeaders['x-aws-ec2-metadata-token'] = - await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; } - if (!this.regionUrl) { - throw new Error('Unable to determine AWS region due to missing ' + - '"options.credential_source.region_url"'); + if (!cacheKey) { + throw Error('Scopes or url must be provided'); } - const opts = { - ...this.additionalGaxiosOptions, - url: this.regionUrl, - method: 'GET', - responseType: 'text', - headers: metadataHeaders, - }; - const response = await context.transporter.request(opts); - // Remove last character. For example, if us-east-2b is returned, - // the region would be us-east-2. - return response.data.substr(0, response.data.length - 1); + return cacheKey; } /** - * Returns AWS security credentials. This first checks to see if the credentials - * is available as environment variables. If it is not, then the supplier - * will call the security credentials URL. - * @param context {@link ExternalAccountSupplierContext} from the calling - * {@link AwsClient}, contains the requested audience and subject token type - * for the external account identity. - * @return A promise that resolves with the AWS security credentials. + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. */ - async getAwsSecurityCredentials(context) { - // Check environment variables for permanent credentials first. 
- // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html - if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { - return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; } - const metadataHeaders = {}; - if (this.imdsV2SessionTokenUrl) { - metadataHeaders['x-aws-ec2-metadata-token'] = - await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); } - // Since the role on a VM can change, we don't need to cache it. - const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); - // Temporary credentials typically last for several hours. - // Expiration is returned in response. - // Consider future optimization of this logic to cache AWS tokens - // until their natural expiration. - const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); - return { - accessKeyId: awsCreds.AccessKeyId, - secretAccessKey: awsCreds.SecretAccessKey, - token: awsCreds.Token, - }; + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? { ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; } -} -exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; -_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = -/** - * @param transporter The transporter to use for requests. 
- * @return A promise that resolves with the IMDSv2 Session Token. - */ -async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { - const opts = { - ...this.additionalGaxiosOptions, - url: this.imdsV2SessionTokenUrl, - method: 'PUT', - responseType: 'text', - headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, - }; - const response = await transporter.request(opts); - return response.data; -}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = -/** - * @param headers The headers to be used in the metadata request. - * @param transporter The transporter to use for requests. - * @return A promise that resolves with the assigned role to the current - * AWS VM. This is needed for calling the security-credentials endpoint. - */ -async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { - if (!this.securityCredentialsUrl) { - throw new Error('Unable to determine AWS role name due to missing ' + - '"options.credential_source.url"'); + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; } - const opts = { - ...this.additionalGaxiosOptions, - url: this.securityCredentialsUrl, - method: 'GET', - responseType: 'text', - headers: headers, - }; - const response = await transporter.request(opts); - return response.data; -}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = -/** - * Retrieves the temporary AWS credentials by calling the security-credentials - * endpoint as specified in the `credential_source` object. - * @param roleName The role attached to the current VM. - * @param headers The headers to be used in the metadata request. - * @param transporter The transporter to use for requests. - * @return A promise that resolves with the temporary AWS credentials - * needed for creating the GetCallerIdentity signed request. - */ -async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { - const response = await transporter.request({ - ...this.additionalGaxiosOptions, - url: `${this.securityCredentialsUrl}/${roleName}`, - responseType: 'json', - headers: headers, - }); - return response.data; -}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { - // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. - // Only one is required. 
- return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); -}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { - // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. - if (process.env['AWS_ACCESS_KEY_ID'] && - process.env['AWS_SECRET_ACCESS_KEY']) { - return { - accessKeyId: process.env['AWS_ACCESS_KEY_ID'], - secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], - token: process.env['AWS_SESSION_TOKEN'], - }; + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } } - return null; -}; + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; /***/ }), -/***/ 6270: +/***/ 13959: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2013 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -25735,360 +57560,281 @@ async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredent // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; -const stream = __nccwpck_require__(2781); -const authclient_1 = __nccwpck_require__(4627); -const sts = __nccwpck_require__(6308); -/** - * The required token exchange grant_type: rfc8693#section-2.1 - */ -const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; -/** - * The requested token exchange requested_token_type: rfc8693#section-2.1 - */ -const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** - * The requested token exchange subject_token_type: rfc8693#section-2.1 - */ -const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; -/** - * The maximum number of access boundary rules a Credential Access Boundary - * can contain. - */ -exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; -/** - * Offset to take into account network delays and server clock skews. - */ -exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; -/** - * Defines a set of Google credentials that are downscoped from an existing set - * of Google OAuth2 credentials. This is useful to restrict the Identity and - * Access Management (IAM) permissions that a short-lived credential can use. - * The common pattern of usage is to have a token broker with elevated access - * generate these downscoped credentials from higher access source credentials - * and pass the downscoped short-lived access tokens to a token consumer via - * some secure authenticated channel for limited access to Google Cloud Storage - * resources. 
- */ -class DownscopedClient extends authclient_1.AuthClient { +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(76031); +const jwtaccess_1 = __nccwpck_require__(68740); +const oauth2client_1 = __nccwpck_require__(3936); +const authclient_1 = __nccwpck_require__(44627); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } /** - * Instantiates a downscoped client object using the provided source - * AuthClient and credential access boundary rules. - * To downscope permissions of a source AuthClient, a Credential Access - * Boundary that specifies which resources the new credential can access, as - * well as an upper bound on the permissions that are available on each - * resource, has to be defined. A downscoped client can then be instantiated - * using the source AuthClient and the Credential Access Boundary. - * @param authClient The source AuthClient to be downscoped based on the - * provided Credential Access Boundary rules. - * @param credentialAccessBoundary The Credential Access Boundary which - * contains a list of access boundary rules. Each rule contains information - * on the resource that the rule applies to, the upper bound of the - * permissions that are available on that resource and an optional - * condition to further restrict permissions. - * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** - * Optional additional behavior customization options. - * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** - * Optional quota project id for setting up in the x-goog-user-project header. + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. */ - constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { - super({ ...additionalOptions, quotaProjectId }); - this.authClient = authClient; - this.credentialAccessBoundary = credentialAccessBoundary; - // Check 1-10 Access Boundary Rules are defined within Credential Access - // Boundary. - if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { - throw new Error('At least one access boundary rule needs to be defined.'); - } - else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > - exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { - throw new Error('The provided access boundary has more than ' + - `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? 
`https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); } - // Check at least one permission should be defined in each Access Boundary - // Rule. - for (const rule of credentialAccessBoundary.accessBoundary - .accessBoundaryRules) { - if (rule.availablePermissions.length === 0) { - throw new Error('At least one permission should be defined in access boundary rules.'); + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedence over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; } } - this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); - this.cachedDownscopedAccessToken = null; + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } } /** - * Provides a mechanism to inject Downscoped access tokens directly. - * The expiry_date field is required to facilitate determination of the token - * expiration which would make it easier for the token consumer to handle. - * @param credentials The Credentials object to set on the current client. + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token.
*/ - setCredentials(credentials) { - if (!credentials.expiry_date) { - throw new Error('The access token expiry_date field is missing in the provided ' + - 'credentials.'); + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); } - super.setCredentials(credentials); - this.cachedDownscopedAccessToken = credentials; + return gtoken.idToken; } - async getAccessToken() { - // If the cached access token is unavailable or expired, force refresh. - // The Downscoped access token will be returned in - // DownscopedAccessTokenResponse format. - if (!this.cachedDownscopedAccessToken || - this.isExpired(this.cachedDownscopedAccessToken)) { - await this.refreshAccessTokenAsync(); + /** + * Determine if there are currently scopes available. + */ + hasUserScopes() { + if (!this.scopes) { + return false; } - // Return Downscoped access token in DownscopedAccessTokenResponse format. - return { - token: this.cachedDownscopedAccessToken.access_token, - expirationTime: this.cachedDownscopedAccessToken.expiry_date, - res: this.cachedDownscopedAccessToken.res, - }; + return this.scopes.length > 0; } /** - * The main authentication interface. It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * The result has the form: - * { Authorization: 'Bearer ' } + * Are there any default or user scopes defined. */ - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); - const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, - }; - return this.addSharedMetadataHeaders(headers); + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; } - request(opts, callback) { + authorize(callback) { if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); + this.authorizeAsync().then(r => callback(null, r), callback); } else { - return this.requestAsync(opts); + return this.authorizeAsync(); } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure - * @return A promise that resolves with the successful response. 
- */ - async requestAsync(opts, reAuthRetried = false) { - let response; - try { - const requestHeaders = await this.getRequestHeaders(); - opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; - } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; - } - response = await this.transporter.request(opts); - } - catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - forceRefreshOnFailure is true - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!reAuthRetried && - isAuthErr && - !isReadableStream && - this.forceRefreshOnFailure) { - await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); - } - } - throw e; + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); } - return response; + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; } /** - * Forces token refresh, even if unexpired tokens are currently cached. - * GCP access tokens are retrieved from authclient object/source credential. - * Then GCP access tokens are exchanged for downscoped access tokens via the - * token exchange endpoint. - * @return A promise that resolves with the fresh downscoped access token. + * Refreshes the access token. + * @param refreshToken ignored + * @private */ - async refreshAccessTokenAsync() { - var _a; - // Retrieve GCP access token from source credential. - const subjectToken = (await this.authClient.getAccessToken()).token; - // Construct the STS credentials options. - const stsCredentialsOptions = { - grantType: STS_GRANT_TYPE, - requestedTokenType: STS_REQUEST_TOKEN_TYPE, - subjectToken: subjectToken, - subjectTokenType: STS_SUBJECT_TOKEN_TYPE, - }; - // Exchange the source AuthClient access token for a Downscoped access - // token. - const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); - /** - * The STS endpoint will only return the expiration time for the downscoped - * access token if the original access token represents a service account. - * The downscoped token's expiration time will always match the source - * credential expiration. When no expires_in is returned, we can copy the - * source credential's expiration time. - */ - const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; - const expiryDate = stsResponse.expires_in - ? new Date().getTime() + stsResponse.expires_in * 1000 - : sourceCredExpireDate; - // Save response in cached access token. - this.cachedDownscopedAccessToken = { - access_token: stsResponse.access_token, - expiry_date: expiryDate, - res: stsResponse.res, - }; - // Save credentials. - this.credentials = {}; - Object.assign(this.credentials, this.cachedDownscopedAccessToken); - delete this.credentials.res; - // Trigger tokens event to notify external listeners. 
- this.emit('tokens', { - refresh_token: null, - expiry_date: this.cachedDownscopedAccessToken.expiry_date, - access_token: this.cachedDownscopedAccessToken.access_token, - token_type: 'Bearer', - id_token: null, + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), }); - // Return the cached access token. - return this.cachedDownscopedAccessToken; + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; } /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param downscopedAccessToken The credentials to check for expiration. - * @return Whether the credentials are expired or not. + * Create a gToken if it doesn't already exist. */ - isExpired(downscopedAccessToken) { - const now = new Date().getTime(); - return downscopedAccessToken.expiry_date - ? now >= - downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis - : false; - } -} -exports.DownscopedClient = DownscopedClient; - - -/***/ }), - -/***/ 1380: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getEnv = exports.clear = exports.GCPEnv = void 0; -const gcpMetadata = __nccwpck_require__(3563); -var GCPEnv; -(function (GCPEnv) { - GCPEnv["APP_ENGINE"] = "APP_ENGINE"; - GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; - GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; - GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; - GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; - GCPEnv["NONE"] = "NONE"; -})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); -let envPromise; -function clear() { - envPromise = undefined; -} -exports.clear = clear; -async function getEnv() { - if (envPromise) { - return envPromise; - } - envPromise = getEnvMemoized(); - return envPromise; -} -exports.getEnv = getEnv; -async function getEnvMemoized() { - let env = GCPEnv.NONE; - if (isAppEngine()) { - env = GCPEnv.APP_ENGINE; - } - else if (isCloudFunction()) { - env = GCPEnv.CLOUD_FUNCTIONS; + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; } - else if (await isComputeEngine()) { - if (await isKubernetesEngine()) { - env = GCPEnv.KUBERNETES_ENGINE; + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. 
+ */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); } - else if (isCloudRun()) { - env = GCPEnv.CLOUD_RUN; + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { - env = GCPEnv.COMPUTE_ENGINE; + return this.fromStreamAsync(inputStream); } } - else { - env = GCPEnv.NONE; + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); } - return env; -} -function isAppEngine() { - return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); -} -function isCloudFunction() { - return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); -} -/** - * This check only verifies that the environment is running knative. - * This must be run *after* checking for Kubernetes, otherwise it will - * return a false positive. - */ -function isCloudRun() { - return !!process.env.K_CONFIGURATION; -} -async function isKubernetesEngine() { - try { - await gcpMetadata.instance('attributes/cluster-name'); - return true; + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; } - catch (e) { - return false; + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); } } -async function isComputeEngine() { - return gcpMetadata.isAvailable(); -} +exports.JWT = JWT; /***/ }), -/***/ 8749: +/***/ 74524: /***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2022 Google LLC +// Copyright 2014 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26102,147 +57848,58 @@ async function isComputeEngine() { // See the License for the specific language governing permissions and // limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; -const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; -const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; -const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; -/** - * Defines the response of a 3rd party executable run by the pluggable auth client. - */ -class ExecutableResponse { +exports.LoginTicket = void 0; +class LoginTicket { /** - * Instantiates an ExecutableResponse instance using the provided JSON object - * from the output of the executable. - * @param responseJson Response from a 3rd party executable, loaded from a - * run of the executable or a cached output file. + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor */ - constructor(responseJson) { - // Check that the required fields exist in the json response. - if (!responseJson.version) { - throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); - } - if (responseJson.success === undefined) { - throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); - } - this.version = responseJson.version; - this.success = responseJson.success; - // Validate required fields for a successful response. - if (this.success) { - this.expirationTime = responseJson.expiration_time; - this.tokenType = responseJson.token_type; - // Validate token type field. - if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && - this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { - throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + - `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); - } - // Validate subject token. - if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { - if (!responseJson.saml_response) { - throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); - } - this.subjectToken = responseJson.saml_response; - } - else { - if (!responseJson.id_token) { - throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + - `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); - } - this.subjectToken = responseJson.id_token; - } - } - else { - // Both code and message must be provided for unsuccessful responses. - if (!responseJson.code) { - throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); - } - if (!responseJson.message) { - throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); - } - this.errorCode = responseJson.code; - this.errorMessage = responseJson.message; - } + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; } /** - * @return A boolean representing if the response has a valid token. 
Returns - * true when the response was successful and the token is not expired. + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID */ - isValid() { - return !this.isExpired() && this.success; + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; } /** - * @return A boolean representing if the response is expired. Returns true if the - * provided timeout has passed. + * Returns attributes from the login ticket. This can contain + * various information about the user session. + * + * @return The envelope and payload */ - isExpired() { - return (this.expirationTime !== undefined && - this.expirationTime < Math.round(Date.now() / 1000)); - } -} -exports.ExecutableResponse = ExecutableResponse; -/** - * An error thrown by the ExecutableResponse class. - */ -class ExecutableResponseError extends Error { - constructor(message) { - super(message); - Object.setPrototypeOf(this, new.target.prototype); + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; } } -exports.ExecutableResponseError = ExecutableResponseError; -/** - * An error thrown when the 'version' field in an executable response is missing or invalid. - */ -class InvalidVersionFieldError extends ExecutableResponseError { -} -exports.InvalidVersionFieldError = InvalidVersionFieldError; -/** - * An error thrown when the 'success' field in an executable response is missing or invalid. - */ -class InvalidSuccessFieldError extends ExecutableResponseError { -} -exports.InvalidSuccessFieldError = InvalidSuccessFieldError; -/** - * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. - */ -class InvalidExpirationTimeFieldError extends ExecutableResponseError { -} -exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; -/** - * An error thrown when the 'token_type' field in an executable response is missing or invalid. - */ -class InvalidTokenTypeFieldError extends ExecutableResponseError { -} -exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; -/** - * An error thrown when the 'code' field in an executable response is missing or invalid. - */ -class InvalidCodeFieldError extends ExecutableResponseError { -} -exports.InvalidCodeFieldError = InvalidCodeFieldError; -/** - * An error thrown when the 'message' field in an executable response is missing or invalid. - */ -class InvalidMessageFieldError extends ExecutableResponseError { -} -exports.InvalidMessageFieldError = InvalidMessageFieldError; -/** - * An error thrown when the subject token in an executable response is missing or invalid. - */ -class InvalidSubjectTokenError extends ExecutableResponseError { -} -exports.InvalidSubjectTokenError = InvalidSubjectTokenError; +exports.LoginTicket = LoginTicket; /***/ }), -/***/ 8765: +/***/ 3936: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2023 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26256,144 +57913,373 @@ exports.InvalidSubjectTokenError = InvalidSubjectTokenError; // See the License for the specific language governing permissions and // limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; -const authclient_1 = __nccwpck_require__(4627); -const oauth2common_1 = __nccwpck_require__(9510); -const gaxios_1 = __nccwpck_require__(9555); -const stream = __nccwpck_require__(2781); -const baseexternalclient_1 = __nccwpck_require__(7391); -/** - * The credentials JSON file type for external account authorized user clients. - */ -exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; -const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; -/** - * Handler for token refresh requests sent to the token_url endpoint for external - * authorized user credentials. - */ -class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { +exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const querystring = __nccwpck_require__(63477); +const stream = __nccwpck_require__(12781); +const formatEcdsa = __nccwpck_require__(11728); +const crypto_1 = __nccwpck_require__(78043); +const authclient_1 = __nccwpck_require__(44627); +const loginticket_1 = __nccwpck_require__(74524); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } /** - * Initializes an ExternalAccountAuthorizedUserHandler instance. - * @param url The URL of the token refresh endpoint. - * @param transporter The transporter to use for the refresh request. - * @param clientAuthentication The client authentication credentials to use - * for the refresh request. + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. 
*/ - constructor(url, transporter, clientAuthentication) { - super(clientAuthentication); - this.url = url; - this.transporter = transporter; + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate 128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const values = { + code: options.code, + client_id: options.client_id || this._clientId, + client_secret: this._clientSecret, + redirect_uri: options.redirect_uri || this.redirectUri, + grant_type: 'authorization_code', + code_verifier: options.codeVerifier, + }; + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token.
+ * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } } /** - * Requests a new access token from the token_url endpoint using the provided - * refresh token. - * @param refreshToken The refresh token to use to generate a new access token. 
- * @param additionalHeaders Optional additional headers to pass along the - * request. - * @return A promise that resolves with the token refresh response containing - * the requested access token and its expiration time. + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized */ - async refreshToken(refreshToken, additionalHeaders) { - const values = new URLSearchParams({ - grant_type: 'refresh_token', - refresh_token: refreshToken, - }); - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - ...additionalHeaders, - }; - const opts = { - ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, - url: this.url, - method: 'POST', - headers, - data: values.toString(), - responseType: 'json', - }; - // Apply OAuth client authentication. - this.applyClientAuthenticationOptions(opts); + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; try { - const response = await this.transporter.request(opts); - // Successful response. - const tokenRefreshResponse = response.data; - tokenRefreshResponse.res = response; - return tokenRefreshResponse; + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; } - catch (error) { - // Translate error to OAuthError. - if (error instanceof gaxios_1.GaxiosError && error.response) { - throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, - // Preserve other fields from the original error. - error); + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; } - // Request could fail before the server responds. 
- throw error; + throw e; } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; } -} -/** - * External Account Authorized User Client. This is used for OAuth2 credentials - * sourced using external identities through Workforce Identity Federation. - * Obtaining the initial access and refresh token can be done through the - * Google Cloud CLI. - */ -class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { /** - * Instantiates an ExternalAccountAuthorizedUserClient instances using the - * provided JSON object loaded from a credentials files. - * An error is throws if the credential is not valid. - * @param options The external account authorized user option object typically - * from the external accoutn authorized user JSON credential file. - * @param additionalOptions **DEPRECATED, all options are available in the - * `options` parameter.** Optional additional behavior customization options. - * These currently customize expiration threshold time and whether to retry - * on 401/403 API request errors. + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} */ - constructor(options, additionalOptions) { - var _a; - super({ ...options, ...additionalOptions }); - if (options.universe_domain) { - this.universeDomain = options.universe_domain; - } - this.refreshToken = options.refresh_token; - const clientAuth = { - confidentialClientType: 'basic', - clientId: options.client_id, - clientSecret: options.client_secret, + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', }; - this.externalAccountAuthorizedUserHandler = - new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); - this.cachedAccessToken = null; - this.quotaProjectId = options.quota_project_id; - // As threshold could be zero, - // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the - // zero value. - if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { - this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); } else { - this.eagerRefreshThresholdMillis = additionalOptions - .eagerRefreshThresholdMillis; + return this.transporter.request(opts); } - this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.forceRefreshOnFailure); } - async getAccessToken() { - // If cached access token is unavailable or expired, force refresh. - if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { - await this.refreshAccessTokenAsync(); + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); } - // Return GCP access token in GetAccessTokenResponse format. - return { - token: this.cachedAccessToken.access_token, - res: this.cachedAccessToken.res, - }; } - async getRequestHeaders() { - const accessTokenResponse = await this.getAccessToken(); - const headers = { - Authorization: `Bearer ${accessTokenResponse.token}`, - }; - return this.addSharedMetadataHeaders(headers); + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } } request(opts, callback) { if (callback) { @@ -26405,26 +58291,21 @@ class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { return this.requestAsync(opts); } } - /** - * Authenticates the provided HTTP request, processes it and resolves with the - * returned response. - * @param opts The HTTP request options. - * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. - * @return A promise that resolves with the successful response. - */ async requestAsync(opts, reAuthRetried = false) { - let response; + let r2; try { - const requestHeaders = await this.getRequestHeaders(); + const r = await this.getRequestMetadataAsync(opts.url); opts.headers = opts.headers || {}; - if (requestHeaders && requestHeaders['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = - requestHeaders['x-goog-user-project']; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; } - if (requestHeaders && requestHeaders.Authorization) { - opts.headers.Authorization = requestHeaders.Authorization; + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; } - response = await this.transporter.request(opts); + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); } catch (e) { const res = e.response; @@ -26434,223 +58315,372 @@ class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream - // - forceRefreshOnFailure is true + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. + // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; if (!reAuthRetried && isAuthErr && !isReadableStream && - this.forceRefreshOnFailure) { + mayRequireRefresh) { await this.refreshAccessTokenAsync(); - return await this.requestAsync(opts, true); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); } } throw e; } - return response; + return r2; } - /** - * Forces token refresh, even if unexpired tokens are currently cached. - * @return A promise that resolves with the refreshed credential. - */ - async refreshAccessTokenAsync() { - // Refresh the access token using the refresh token. - const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); - this.cachedAccessToken = { - access_token: refreshResponse.access_token, - expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, - res: refreshResponse.res, - }; - if (refreshResponse.refresh_token !== undefined) { - this.refreshToken = refreshResponse.refresh_token; + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); } - return this.cachedAccessToken; + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; } /** - * Returns whether the provided credentials are expired or not. - * If there is no expiry time, assumes the token is not expired or expiring. - * @param credentials The credentials to check for expiration. - * @return Whether the credentials are expired or not. 
+ * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. */ - isExpired(credentials) { - const now = new Date().getTime(); - return credentials.expiry_date - ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis - : false; + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; } -} -exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; - - -/***/ }), - -/***/ 4381: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ExternalAccountClient = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const identitypoolclient_1 = __nccwpck_require__(117); -const awsclient_1 = __nccwpck_require__(1569); -const pluggable_auth_client_1 = __nccwpck_require__(4782); -/** - * Dummy class with no constructor. Developers are expected to use fromJSON. - */ -class ExternalAccountClient { - constructor() { - throw new Error('ExternalAccountClients should be initialized via: ' + - 'ExternalAccountClient.fromJSON(), ' + - 'directly via explicit constructors, eg. ' + - 'new AwsClient(options), new IdentityPoolClient(options), new' + - 'PluggableAuthClientOptions, or via ' + - 'new GoogleAuth(options).getClient()'); + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } } - /** - * This static method will instantiate the - * corresponding type of external account credential depending on the - * underlying credential source. - * @param options The external account options object typically loaded - * from the external account JSON credential file. - * @param additionalOptions **DEPRECATED, all options are available in the - * `options` parameter.** Optional additional behavior customization options. - * These currently customize expiration threshold time and whether to retry - * on 401/403 API request errors. - * @return A BaseExternalAccountClient instance or null if the options - * provided do not correspond to an external account credential. 
- */ - static fromJSON(options, additionalOptions) { - var _a, _b; - if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { - return new awsclient_1.AwsClient(options, additionalOptions); - } - else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { - return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; } - else { - return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds } } - else { - return null; + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; } -} -exports.ExternalAccountClient = ExternalAccountClient; - - -/***/ }), - -/***/ 7646: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-var _a, _b, _c; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.FileSubjectTokenSupplier = void 0; -const util_1 = __nccwpck_require__(3837); -const fs = __nccwpck_require__(7147); -// fs.readfile is undefined in browser karma tests causing -// `npm run browser-test` to fail as test.oauth2.ts imports this file via -// src/index.ts. -// Fallback to void function to avoid promisify throwing a TypeError. -const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); -const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); -const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); -/** - * Internal subject token supplier implementation used when a file location - * is configured in the credential configuration used to build an {@link IdentityPoolClient} - */ -class FileSubjectTokenSupplier { - /** - * Instantiates a new file based subject token supplier. - * @param opts The file subject token supplier options to build the supplier - * with. - */ - constructor(opts) { - this.filePath = opts.filePath; - this.formatType = opts.formatType; - this.subjectTokenFieldName = opts.subjectTokenFieldName; + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); } /** - * Returns the subject token stored at the file specified in the constructor. - * @param context {@link ExternalAccountSupplierContext} from the calling - * {@link IdentityPoolClient}, contains the requested audience and subject - * token type for the external account identity. Not used. + * Verify the id token is signed with the correct certificate + * and is from the correct audience. + * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. */ - async getSubjectToken(context) { - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - let parsedFilePath = this.filePath; + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' 
+ segments[1]; + let signature = segments[2]; + let envelope; + let payload; try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - parsedFilePath = await realpath(parsedFilePath); - if (!(await lstat(parsedFilePath)).isFile()) { - throw new Error(); + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); } catch (err) { if (err instanceof Error) { - err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + err.message = `Can't parse token payload '${segments[0]}`; } throw err; } - let subjectToken; - const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); - if (this.formatType === 'text') { - subjectToken = rawText; + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); } - else if (this.formatType === 'json' && this.subjectTokenFieldName) { - const json = JSON.parse(rawText); - subjectToken = json[this.subjectTokenFieldName]; + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source file'); + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); } - return subjectToken; + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if (!audVerified) { + throw new Error('Wrong recipient, 
payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; } } -exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; /***/ }), -/***/ 695: +/***/ 19510: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2019 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26664,810 +58694,779 @@ exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.CLOUD_SDK_CLIENT_ID = void 0; -const child_process_1 = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -const gcpMetadata = __nccwpck_require__(3563); -const os = __nccwpck_require__(2037); -const path = __nccwpck_require__(1017); -const crypto_1 = __nccwpck_require__(8043); -const transporters_1 = __nccwpck_require__(2649); -const computeclient_1 = __nccwpck_require__(6875); -const idtokenclient_1 = __nccwpck_require__(298); -const envDetect_1 = __nccwpck_require__(1380); -const jwtclient_1 = __nccwpck_require__(3959); -const refreshclient_1 = __nccwpck_require__(8790); -const impersonated_1 = __nccwpck_require__(1103); -const externalclient_1 = __nccwpck_require__(4381); -const baseexternalclient_1 = __nccwpck_require__(7391); -const authclient_1 = __nccwpck_require__(4627); -const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(8765); -const util_1 = __nccwpck_require__(8905); -exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; -const GoogleAuthExceptionMessages = { - NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + - 'To learn more about authentication and Google APIs, visit: \n' + - 'https://cloud.google.com/docs/authentication/getting-started', - NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. 
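A rough usage sketch for the verification path shown above; `idToken` and `CLIENT_ID` are assumed placeholders, and in application code the library's higher-level verifyIdToken() is the usual entry point that wraps these same steps:

const {OAuth2Client} = require('google-auth-library');

async function checkGoogleIdToken(idToken, CLIENT_ID) {
  const client = new OAuth2Client();
  // Fetches (and caches) Google's federated sign-on certificates.
  const {certs} = await client.getFederatedSignonCertsAsync();
  // Checks signature, issue/expiry times, audience and issuer; resolves to a LoginTicket.
  const ticket = await client.verifySignedJwtWithCertsAsync(
    idToken,
    certs,
    CLIENT_ID,
    ['accounts.google.com', 'https://accounts.google.com']
  );
  return ticket.getPayload();
}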
\n' + - 'To learn more about authentication and Google APIs, visit: \n' + - 'https://cloud.google.com/docs/authentication/getting-started', - NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + - 'To learn more about Universe Domain retrieval, visit: \n' + - 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', -}; -class GoogleAuth { - // Note: this properly is only public to satisify unit tests. - // https://github.com/Microsoft/TypeScript/issues/5228 - get isGCE() { - return this.checkIsGCE; - } +exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; +const querystring = __nccwpck_require__(63477); +const crypto_1 = __nccwpck_require__(78043); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { /** - * Configuration is resolved in the following order of precedence: - * - {@link GoogleAuthOptions.credentials `credentials`} - * - {@link GoogleAuthOptions.keyFilename `keyFilename`} - * - {@link GoogleAuthOptions.keyFile `keyFile`} - * - * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the - * {@link AuthClient `AuthClient`s}. - * - * @param opts + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. */ - constructor(opts) { - /** - * Caches a value indicating whether the auth layer is running on Google - * Compute Engine. - * @private - */ - this.checkIsGCE = undefined; - // To save the contents of the JSON credential file - this.jsonContent = null; - this.cachedCredential = null; - this.clientOptions = {}; - opts = opts || {}; - this._cachedProjectId = opts.projectId || null; - this.cachedCredential = opts.authClient || null; - this.keyFilename = opts.keyFilename || opts.keyFile; - this.scopes = opts.scopes; - this.jsonContent = opts.credentials || null; - this.clientOptions = opts.clientOptions || {}; - if (opts.universeDomain) { - this.clientOptions.universeDomain = opts.universeDomain; - } - } - // GAPIC client libraries should always use self-signed JWTs. The following - // variables are set on the JWT client in order to indicate the type of library, - // and sign the JWT with the correct audience and scopes (if not supplied). - setGapicJWTValues(client) { - client.defaultServicePath = this.defaultServicePath; - client.useJWTAccessWithScope = this.useJWTAccessWithScope; - client.defaultScopes = this.defaultScopes; - } - getProjectId(callback) { - if (callback) { - this.getProjectIdAsync().then(r => callback(null, r), callback); - } - else { - return this.getProjectIdAsync(); - } + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); } /** - * A temporary method for internal `getProjectId` usages where `null` is - * acceptable. In a future major release, `getProjectId` should return `null` - * (as the `Promise` base signature describes) and this private - * method should be removed. 
- * - * @returns Promise that resolves with project id (or `null`) - */ - async getProjectIdOptional() { - try { - return await this.getProjectId(); - } - catch (e) { - if (e instanceof Error && - e.message === GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { - return null; - } - else { - throw e; - } - } - } - /* - * A private method for finding and caching a projectId. - * - * Supports environments in order of precedence: - * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable - * - GOOGLE_APPLICATION_CREDENTIALS JSON file - * - Cloud SDK: `gcloud config config-helper --format json` - * - GCE project ID from metadata server - * - * @returns projectId + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. */ - async findAndCacheProjectId() { - let projectId = null; - projectId || (projectId = await this.getProductionProjectId()); - projectId || (projectId = await this.getFileProjectId()); - projectId || (projectId = await this.getDefaultServiceProjectId()); - projectId || (projectId = await this.getGCEProjectId()); - projectId || (projectId = await this.getExternalAccountClientProjectId()); - if (projectId) { - this._cachedProjectId = projectId; - return projectId; - } - else { - throw new Error(GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); - } - } - async getProjectIdAsync() { - if (this._cachedProjectId) { - return this._cachedProjectId; - } - if (!this._findProjectIdPromise) { - this._findProjectIdPromise = this.findAndCacheProjectId(); + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); } - return this._findProjectIdPromise; } /** - * Retrieves a universe domain from the metadata server via - * {@link gcpMetadata.universe}. + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. * - * @returns a universe domain + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. */ - async getUniverseDomainFromMetadataServer() { + injectAuthenticatedHeaders(opts, bearerToken) { var _a; - let universeDomain; - try { - universeDomain = await gcpMetadata.universe('universe-domain'); - universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); } - catch (e) { - if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { - universeDomain = authclient_1.DEFAULT_UNIVERSE; - } - else { - throw e; - } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? 
void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); } - return universeDomain; } /** - * Retrieves, caches, and returns the universe domain in the following order - * of precedence: - * - The universe domain in {@link GoogleAuth.clientOptions} - * - An existing or ADC {@link AuthClient}'s universe domain - * - {@link gcpMetadata.universe}, if {@link Compute} client + * Applies client authentication on the request's body if request-body + * client authentication is selected. * - * @returns The universe domain - */ - async getUniverseDomain() { - let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); - try { - universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); - } - catch (_a) { - // client or ADC is not available - universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); - } - return universeDomain; - } - /** - * @returns Any scopes (user-specified or default scopes specified by the - * client library) that need to be set on the current Auth client. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. */ - getAnyScopes() { - return this.scopes || this.defaultScopes; - } - getApplicationDefault(optionsOrCallback = {}, callback) { - let options; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; - } - else { - options = optionsOrCallback; - } - if (callback) { - this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); - } - else { - return this.getApplicationDefaultAsync(options); - } - } - async getApplicationDefaultAsync(options = {}) { - // If we've already got a cached credential, return it. - // This will also preserve one's configured quota project, in case they - // set one directly on the credential previously. - if (this.cachedCredential) { - return await this.prepareAndCacheADC(this.cachedCredential); - } - // Since this is a 'new' ADC to cache we will use the environment variable - // if it's available. We prefer this value over the value from ADC. - const quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT']; - let credential; - // Check for the existence of a local environment variable pointing to the - // location of the credential file. This is typically used in local - // developer scenarios. - credential = - await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Look in the well-known credential file location. 
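A small sketch of the header produced by the 'basic' branch above; the client ID and secret are made-up placeholders, and Buffer is used here in place of the handler's crypto helper:

const clientId = 'example-client-id';
const clientSecret = 'example-client-secret';
const headers = {
  // Same shape the handler assigns via Object.assign(opts.headers, ...)
  Authorization: `Basic ${Buffer.from(`${clientId}:${clientSecret}`).toString('base64')}`,
};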
- credential = - await this._tryGetApplicationCredentialsFromWellKnownFile(options); - if (credential) { - if (credential instanceof jwtclient_1.JWT) { - credential.scopes = this.scopes; - } - else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { - credential.scopes = this.getAnyScopes(); - } - return await this.prepareAndCacheADC(credential, quotaProjectIdOverride); - } - // Determine if we're running on GCE. - if (await this._checkIsGCE()) { - // set universe domain for Compute client - if (!(0, util_1.originalOrCamelOptions)(options).get('universe_domain')) { - options.universeDomain = - await this.getUniverseDomainFromMetadataServer(); + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); } - options.scopes = this.getAnyScopes(); - return await this.prepareAndCacheADC(new computeclient_1.Compute(options), quotaProjectIdOverride); - } - throw new Error('Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.'); - } - async prepareAndCacheADC(credential, quotaProjectIdOverride) { - const projectId = await this.getProjectIdOptional(); - if (quotaProjectIdOverride) { - credential.quotaProjectId = quotaProjectIdOverride; } - this.cachedCredential = credential; - return { credential, projectId }; } /** - * Determines whether the auth layer is running on Google Compute Engine. - * Checks for GCP Residency, then fallback to checking if metadata server - * is available. + * Retry config for Auth-related requests. * - * @returns A promise that resolves with the boolean. - * @api private + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
*/ - async _checkIsGCE() { - if (this.checkIsGCE === undefined) { - this.checkIsGCE = - gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); - } - return this.checkIsGCE; + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; } - /** - * Attempts to load default credentials from the environment variable path.. - * @returns Promise that resolves with the OAuth2Client or null. - * @api private - */ - async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { - const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || - process.env['google_application_credentials']; - if (!credentialsPath || credentialsPath.length === 0) { - return null; - } - try { - return this._getApplicationCredentialsFromFilePath(credentialsPath, options); - } - catch (e) { - if (e instanceof Error) { - e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; - } - throw e; - } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; } - /** - * Attempts to load default credentials from a well-known file location - * @return Promise that resolves with the OAuth2Client or null. - * @api private - */ - async _tryGetApplicationCredentialsFromWellKnownFile(options) { - // First, figure out the location of the file, depending upon the OS type. - let location = null; - if (this._isWindows()) { - // Windows - location = process.env['APPDATA']; - } - else { - // Linux or Mac - const home = process.env['HOME']; - if (home) { - location = path.join(home, '.config'); - } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); } - // If we found the root path, expand it. - if (location) { - location = path.join(location, 'gcloud', 'application_default_credentials.json'); - if (!fs.existsSync(location)) { - location = null; + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); } - } - // The file does not exist. - if (!location) { - return null; - } - // The file seems to exist. Try to use it. 
- const client = await this._getApplicationCredentialsFromFilePath(location, options); - return client; + }); } + return newError; +} +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; + + +/***/ }), + +/***/ 32460: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(44627); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { /** - * Attempts to load default credentials from a file at the given path.. - * @param filePath The path to the file to read. - * @returns Promise that resolves with the OAuth2Client - * @api private + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. */ - async _getApplicationCredentialsFromFilePath(filePath, options = {}) { - // Make sure the path looks like a string. - if (!filePath || filePath.length === 0) { - throw new Error('The file path is invalid.'); - } - // Make sure there is a file at the path. lstatSync will throw if there is - // nothing there. - try { - // Resolve path to actual file in case of symlink. Expect a thrown error - // if not resolvable. - filePath = fs.realpathSync(filePath); - if (!fs.lstatSync(filePath).isFile()) { - throw new Error(); - } - } - catch (err) { - if (err instanceof Error) { - err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; - } - throw err; - } - // Now open a read stream on the file, and parse it. - const readStream = fs.createReadStream(filePath); - return this.fromStream(readStream, options); + async request(opts) { + return this.transporter.request(opts); } /** - * Create a credentials instance using a given impersonated input options. - * @param json The impersonated input object. - * @returns JWT or UserRefresh Client with data + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} */ - fromImpersonatedJSON(json) { - var _a, _b, _c, _d, _e; - if (!json) { - throw new Error('Must pass in a JSON object containing an impersonated refresh token'); - } - if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); - } - if (!json.source_credentials) { - throw new Error('The incoming JSON object does not contain a source_credentials field'); - } - if (!json.service_account_impersonation_url) { - throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); - } - // Create source client for impersonation - const sourceClient = new refreshclient_1.UserRefreshClient(); - sourceClient.fromJSON(json.source_credentials); - if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { - /** - * Prevents DOS attacks. - * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} - **/ - throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); - } - // Extreact service account from service_account_impersonation_url - const targetPrincipal = (_c = (_b = /(?[^/]+):generateAccessToken$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; - if (!targetPrincipal) { - throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); - } - const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; - const client = new impersonated_1.Impersonated({ - ...json, - delegates: (_e = json.delegates) !== null && _e !== void 0 ? _e : [], - sourceClient: sourceClient, - targetPrincipal: targetPrincipal, - targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], - }); - return client; + async getAccessToken() { + return {}; } /** - * Create a credentials instance using the given input options. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} */ - fromJSON(json, options = {}) { - let client; - // user's preferred universe domain - const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); - if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { - client = new refreshclient_1.UserRefreshClient(options); - client.fromJSON(json); - } - else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { - client = this.fromImpersonatedJSON(json); - } - else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - client = externalclient_1.ExternalAccountClient.fromJSON(json, options); - client.scopes = this.getAnyScopes(); - } - else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { - client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); - } - else { - options.scopes = this.scopes; - client = new jwtclient_1.JWT(options); - this.setGapicJWTValues(client); - client.fromJSON(json); - } - if (preferredUniverseDomain) { - client.universeDomain = preferredUniverseDomain; - } - return client; + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); + + +/***/ }), + +/***/ 44782: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(40810); +const executable_response_1 = __nccwpck_require__(8749); +const pluggable_auth_handler_1 = __nccwpck_require__(18941); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. 
+ */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ *

+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ *

+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ *

+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ * 
+ * + *

The "expiration_time" field in the JSON response is only required for successful + * responses when an output file was specified in the credential configuration + * + *

+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
+ *

Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { /** - * Return a JWT or UserRefreshClient from JavaScript object, caching both the - * object used to instantiate and the client. - * @param json The input object. - * @param options The JWT or UserRefresh options for the client - * @returns JWT or UserRefresh Client with data + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. */ - _cacheClientFromJSON(json, options) { - const client = this.fromJSON(json, options); - // cache both raw data used to instantiate client and client itself. - this.jsonContent = json; - this.cachedCredential = client; - return client; - } - fromStream(inputStream, optionsOrCallback = {}, callback) { - let options = {}; - if (typeof optionsOrCallback === 'function') { - callback = optionsOrCallback; + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - else { - options = optionsOrCallback; + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); } - if (callback) { - this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + // Check if the provided timeout exists and if it is valid. 
+ if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; } else { - return this.fromStreamAsync(inputStream, options); - } - } - fromStreamAsync(inputStream, options) { - return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the Google auth settings.'); + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - try { - const data = JSON.parse(s); - const r = this._cacheClientFromJSON(data, options); - return resolve(r); - } - catch (err) { - // If we failed parsing this.keyFileName, assume that it - // is a PEM or p12 certificate: - if (!this.keyFilename) - throw err; - const client = new jwtclient_1.JWT({ - ...this.clientOptions, - keyFile: this.keyFilename, - }); - this.cachedCredential = client; - this.setGapicJWTValues(client); - return resolve(client); - } - } - catch (err) { - return reject(err); - } - }); + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, }); + this.credentialSourceType = 'executable'; } /** - * Create a credentials instance using the given API key string. - * @param apiKey The API key string - * @param options An optional options object. - * @returns A JWT loaded from the key + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. */ - fromAPIKey(apiKey, options) { - options = options || {}; - const client = new jwtclient_1.JWT(options); - client.fromAPIKey(apiKey); - return client; + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 18941: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(44782); +const executable_response_1 = __nccwpck_require__(8749); +const childProcess = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { /** - * Determines whether the current operating system is Windows. - * @api private + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
*/ - _isWindows() { - const sys = os.platform(); - if (sys && sys.length >= 3) { - if (sys.substring(0, 3).toLowerCase() === 'win') { - return true; - } + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); } - return false; + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; } /** - * Run the Google Cloud SDK command that prints the default project ID + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. */ - async getDefaultServiceProjectId() { - return new Promise(resolve => { - (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { - if (!err && stdout) { + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. try { - const projectId = JSON.parse(stdout).configuration.properties.core.project; - resolve(projectId); - return; + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); } - catch (e) { - // ignore errors + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); } } - resolve(null); + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } }); }); } /** - * Loads the project id from environment variables. - * @api private - */ - getProductionProjectId() { - return (process.env['GCLOUD_PROJECT'] || - process.env['GOOGLE_CLOUD_PROJECT'] || - process.env['gcloud_project'] || - process.env['google_cloud_project']); - } - /** - * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. - * @api private - */ - async getFileProjectId() { - if (this.cachedCredential) { - // Try to read the project ID from the cached credentials file - return this.cachedCredential.projectId; - } - // Ensure the projectId is loaded from the keyFile if available. 
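For orientation, the handler above is normally driven by an external account credential file whose credential_source names an executable, and that executable must emit the JSON contract documented earlier. Both snippets below are illustrative sketches with placeholder identifiers, paths and token sources, not content from the bundled file:

// A representative executable-sourced external account configuration.
const exampleCredentialConfig = {
  type: 'external_account',
  audience: '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-provider',
  subject_token_type: 'urn:ietf:params:oauth:token-type:id_token',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: {
    executable: {
      command: '/path/to/token-helper --format json',
      timeout_millis: 30000,
      output_file: '/tmp/pluggable-auth-output.json',
    },
  },
};

// A minimal token helper that prints the documented OIDC-style response to STDOUT;
// where the ID token comes from (an environment variable here) is an assumption.
const response = {
  version: 1,
  success: true,
  token_type: 'urn:ietf:params:oauth:token-type:id_token',
  id_token: process.env.MY_WORKLOAD_ID_TOKEN,
  expiration_time: Math.floor(Date.now() / 1000) + 3600,
};
process.stdout.write(JSON.stringify(response));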
- if (this.keyFilename) { - const creds = await this.getClient(); - if (creds && creds.projectId) { - return creds.projectId; - } - } - // Try to load a credentials file and read its project ID - const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); - if (r) { - return r.projectId; - } - else { - return null; - } - } - /** - * Gets the project ID from external account client if available. + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. */ - async getExternalAccountClientProjectId() { - if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { - return null; + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; } - const creds = await this.getClient(); - // Do not suppress the underlying error, as the error could contain helpful - // information for debugging and fixing. This is especially true for - // external account creds as in order to get the project ID, the following - // operations have to succeed: - // 1. Valid credentials file should be supplied. - // 2. Ability to retrieve access tokens from STS token exchange API. - // 3. Ability to exchange for service account impersonated credentials (if - // enabled). - // 4. Ability to get project info using the access token from step 2 or 3. - // Without surfacing the error, it is harder for developers to determine - // which step went wrong. - return await creds.getProjectId(); - } - /** - * Gets the Compute Engine project ID if it can be inferred. - */ - async getGCEProjectId() { + let filePath; try { - const r = await gcpMetadata.project('project-id'); - return r; - } - catch (e) { - // Ignore any errors - return null; + filePath = await fs.promises.realpath(this.outputFile); } - } - getCredentials(callback) { - if (callback) { - this.getCredentialsAsync().then(r => callback(null, r), callback); + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; } - else { - return this.getCredentialsAsync(); + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; } - } - async getCredentialsAsync() { - const client = await this.getClient(); - if (client instanceof impersonated_1.Impersonated) { - return { client_email: client.getTargetPrincipal() }; + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; } - if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { - const serviceAccountEmail = client.getServiceAccountEmail(); - if (serviceAccountEmail) { - return { - client_email: serviceAccountEmail, - universe_domain: client.universeDomain, - }; + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); } + return undefined; } - if (this.jsonContent) { - return { - client_email: this.jsonContent.client_email, - private_key: this.jsonContent.private_key, - universe_domain: this.jsonContent.universe_domain, - }; - } - if (await this._checkIsGCE()) { - const [client_email, universe_domain] = await Promise.all([ - gcpMetadata.instance('service-accounts/default/email'), - this.getUniverseDomain(), - ]); - return { client_email, universe_domain }; - } - throw new Error(GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); - } - /** - * Automatically obtain an {@link AuthClient `AuthClient`} based on the - * provided configuration. If no options were passed, use Application - * Default Credentials. - */ - async getClient() { - if (!this.cachedCredential) { - if (this.jsonContent) { - this._cacheClientFromJSON(this.jsonContent, this.clientOptions); - } - else if (this.keyFilename) { - const filePath = path.resolve(this.keyFilename); - const stream = fs.createReadStream(filePath); - await this.fromStreamAsync(stream, this.clientOptions); - } - else { - await this.getApplicationDefaultAsync(this.clientOptions); + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); } - return this.cachedCredential; } /** - * Creates a client which will fetch an ID token for authorization. - * @param targetAudience the audience for the fetched ID token. - * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. */ - async getIdTokenClient(targetAudience) { - const client = await this.getClient(); - if (!('fetchIdToken' in client)) { - throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); } - return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); - } - /** - * Automatically obtain application default credentials, and return - * an access token for making requests. - */ - async getAccessToken() { - const client = await this.getClient(); - return (await client.getAccessToken()).token; - } - /** - * Obtain the HTTP headers that will provide authorization for a given - * request. - */ - async getRequestHeaders(url) { - const client = await this.getClient(); - return client.getRequestHeaders(url); + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; } - /** - * Obtain credentials for a request, then attach the appropriate headers to - * the request options. 
- * @param opts Axios or Request options on which to attach the headers - */ - async authorizeRequest(opts) { - opts = opts || {}; - const url = opts.url || opts.uri; - const client = await this.getClient(); - const headers = await client.getRequestHeaders(url); - opts.headers = Object.assign(opts.headers || {}, headers); - return opts; +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 98790: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(3936); +const querystring_1 = __nccwpck_require__(63477); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; } /** - * Automatically obtain application default credentials, and make an - * HTTP request using the given options. - * @param opts Axios request options for the HTTP request. + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - async request(opts) { - const client = await this.getClient(); - return client.request(opts); + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); } - /** - * Determine the compute environment in which the code is running. - */ - getEnv() { - return (0, envDetect_1.getEnv)(); + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; } /** - * Sign the given data with the current private key, or go out - * to the IAM API to sign it. - * @param data The data to be signed. - * @param endpoint A custom endpoint to use. 
- * - * @example - * ``` - * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); - * ``` + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. */ - async sign(data, endpoint) { - const client = await this.getClient(); - const universe = await this.getUniverseDomain(); - endpoint = - endpoint || - `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; - if (client instanceof impersonated_1.Impersonated) { - const signed = await client.sign(data); - return signed.signedBlob; + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); } - const crypto = (0, crypto_1.createCrypto)(); - if (client instanceof jwtclient_1.JWT && client.key) { - const sign = await crypto.sign(client.key, data); - return sign; + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); } - const creds = await this.getCredentials(); - if (!creds.client_email) { - throw new Error('Cannot sign data without `client_email`.'); + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); } - return this.signBlob(crypto, creds.client_email, data, endpoint); + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; } - async signBlob(crypto, emailOrUniqueId, data, endpoint) { - const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); - const res = await this.request({ - method: 'POST', - url: url.href, - data: { - payload: crypto.encodeBase64StringUtf8(data), - }, - retry: true, - retryConfig: { - httpMethodsToRetry: ['POST'], - }, + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); }); - return res.data.signedBlob; } } -exports.GoogleAuth = GoogleAuth; -/** - * Export DefaultTransporter as a static property of the class. - */ -GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; +exports.UserRefreshClient = UserRefreshClient; /***/ }), -/***/ 9735: -/***/ ((__unused_webpack_module, exports) => { +/***/ 86308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2014 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -27481,42 +59480,110 @@ GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IAMAuth = void 0; -class IAMAuth { +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const querystring = __nccwpck_require__(63477); +const transporters_1 = __nccwpck_require__(72649); +const oauth2common_1 = __nccwpck_require__(19510); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { /** - * IAM credentials. - * - * @param selector the iam authority selector - * @param token the token - * @constructor + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. */ - constructor(selector, token) { - this.selector = selector; - this.token = token; - this.selector = selector; - this.token = token; + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); } /** - * Acquire the HTTP headers required to make an authenticated request. + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. */ - getRequestHeaders() { - return { - 'x-goog-iam-authority-selector': this.selector, - 'x-goog-iam-authorization-token': this.token, + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. 
+ Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. + if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } } } -exports.IAMAuth = IAMAuth; +exports.StsCredentials = StsCredentials; /***/ }), -/***/ 117: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7428: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2021 Google LLC +// Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27530,108 +59597,64 @@ exports.IAMAuth = IAMAuth; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdentityPoolClient = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const util_1 = __nccwpck_require__(8905); -const filesubjecttokensupplier_1 = __nccwpck_require__(7646); -const urlsubjecttokensupplier_1 = __nccwpck_require__(7428); +exports.UrlSubjectTokenSupplier = void 0; /** - * Defines the Url-sourced and file-sourced external account clients mainly - * used for K8s and Azure workloads. + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} */ -class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { +class UrlSubjectTokenSupplier { /** - * Instantiate an IdentityPoolClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid file-sourced or - * url-sourced credential or a workforce pool user project is provided - * with a non workforce audience. - * @param options The external account options object typically loaded - * from the external account JSON credential file. The camelCased options - * are aliases for the snake_cased options. - * @param additionalOptions **DEPRECATED, all options are available in the - * `options` parameter.** Optional additional behavior customization options. - * These currently customize expiration threshold time and whether to retry - * on 401/403 API request errors. + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - const opts = (0, util_1.originalOrCamelOptions)(options); - const credentialSource = opts.get('credential_source'); - const subjectTokenSupplier = opts.get('subject_token_supplier'); - // Validate credential sourcing configuration. 
- if (!credentialSource && !subjectTokenSupplier) { - throw new Error('A credential source or subject token supplier must be specified.'); - } - if (credentialSource && subjectTokenSupplier) { - throw new Error('Only one of credential source or subject token supplier can be specified.'); - } - if (subjectTokenSupplier) { - this.subjectTokenSupplier = subjectTokenSupplier; - this.credentialSourceType = 'programmatic'; - } - else { - const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); - const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); - // Text is the default format type. - const formatType = formatOpts.get('type') || 'text'; - const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); - if (formatType !== 'json' && formatType !== 'text') { - throw new Error(`Invalid credential_source format "${formatType}"`); - } - if (formatType === 'json' && !formatSubjectTokenFieldName) { - throw new Error('Missing subject_token_field_name for JSON credential_source format'); - } - const file = credentialSourceOpts.get('file'); - const url = credentialSourceOpts.get('url'); - const headers = credentialSourceOpts.get('headers'); - if (file && url) { - throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); - } - else if (file && !url) { - this.credentialSourceType = 'file'; - this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ - filePath: file, - formatType: formatType, - subjectTokenFieldName: formatSubjectTokenFieldName, - }); - } - else if (!file && url) { - this.credentialSourceType = 'url'; - this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ - url: url, - formatType: formatType, - subjectTokenFieldName: formatSubjectTokenFieldName, - headers: headers, - additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, - }); - } - else { - throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); - } - } + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; } /** - * Triggered when a external subject token is needed to be exchanged for a GCP - * access token via GCP STS endpoint. Gets a subject token by calling - * the configured {@link SubjectTokenSupplier} - * @return A promise that resolves with the external subject token. + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. 
*/ - async retrieveSubjectToken() { - return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; } } -exports.IdentityPoolClient = IdentityPoolClient; +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; /***/ }), -/***/ 298: +/***/ 14693: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2020 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27644,238 +59667,135 @@ exports.IdentityPoolClient = IdentityPoolClient; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +/* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.IdTokenClient = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -class IdTokenClient extends oauth2client_1.OAuth2Client { - /** - * Google ID Token client - * - * Retrieve ID token from the metadata server. - * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server - */ - constructor(options) { - super(options); - this.targetAudience = options.targetAudience; - this.idTokenProvider = options.idTokenProvider; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - if (!this.credentials.id_token || - !this.credentials.expiry_date || - this.isTokenExpiring()) { - const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); - this.credentials = { - id_token: idToken, - expiry_date: this.getIdTokenExpiryDate(idToken), - }; +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(26463); +const crypto_1 = __nccwpck_require__(78043); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); } - const headers = { - Authorization: 'Bearer ' + this.credentials.id_token, - }; - return { headers }; } - getIdTokenExpiryDate(idToken) { - const payloadB64 = idToken.split('.')[1]; - if (payloadB64) { - const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); - return payload.exp * 1000; + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. 
+ // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; } + return base64; } -} -exports.IdTokenClient = IdTokenClient; - - -/***/ }), - -/***/ 1103: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -const gaxios_1 = __nccwpck_require__(9555); -exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; -class Impersonated extends oauth2client_1.OAuth2Client { - /** - * Impersonated service account credentials. - * - * Create a new access token by impersonating another service account. - * - * Impersonated Credentials allowing credentials issued to a user or - * service account to impersonate another. The source project using - * Impersonated Credentials must enable the "IAMCredentials" API. - * Also, the target service account must grant the orginating principal - * the "Service Account Token Creator" IAM role. - * - * @param {object} options - The configuration object. - * @param {object} [options.sourceClient] the source credential used as to - * acquire the impersonated credentials. - * @param {string} [options.targetPrincipal] the service account to - * impersonate. - * @param {string[]} [options.delegates] the chained list of delegates - * required to grant the final access_token. If set, the sequence of - * identities must have "Service Account Token Creator" capability granted to - * the preceding identity. For example, if set to [serviceAccountB, - * serviceAccountC], the sourceCredential must have the Token Creator role on - * serviceAccountB. serviceAccountB must have the Token Creator on - * serviceAccountC. Finally, C must have Token Creator on target_principal. - * If left unset, sourceCredential must have that role on targetPrincipal. - * @param {string[]} [options.targetScopes] scopes to request during the - * authorization grant. 
- * @param {number} [options.lifetime] number of seconds the delegated - * credential should be valid for up to 3600 seconds by default, or 43,200 - * seconds by extending the token's lifetime, see: - * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth - * @param {string} [options.endpoint] api endpoint override. - */ - constructor(options = {}) { - var _a, _b, _c, _d, _e, _f; - super(options); - // Start with an expired refresh token, which will automatically be - // refreshed before the first API call is made. - this.credentials = { - expiry_date: 1, - refresh_token: 'impersonated-placeholder', + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, }; - this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); - this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; - this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; - this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; - this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; - this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : 'https://iamcredentials.googleapis.com'; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; } - /** - * Signs some bytes. - * - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} - * @param blobToSign String to sign. - * @return denoting the keyyID and signedBlob in base64 string - */ - async sign(blobToSign) { - await this.sourceClient.getAccessToken(); - const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; - const u = `${this.endpoint}/v1/${name}:signBlob`; - const body = { - delegates: this.delegates, - payload: Buffer.from(blobToSign).toString('base64'), + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, }; - const res = await this.sourceClient.request({ - ...Impersonated.RETRY_CONFIG, - url: u, - data: body, - method: 'POST', - }); - return res.data; + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); } - /** The service account email to be impersonated. */ - getTargetPrincipal() { - return this.targetPrincipal; + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const result = new TextDecoder().decode(uint8array); + return result; } - /** - * Refreshes the access token. 
- */ - async refreshToken() { - var _a, _b, _c, _d, _e, _f; - try { - await this.sourceClient.getAccessToken(); - const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; - const u = `${this.endpoint}/v1/${name}:generateAccessToken`; - const body = { - delegates: this.delegates, - scope: this.targetScopes, - lifetime: this.lifetime + 's', - }; - const res = await this.sourceClient.request({ - ...Impersonated.RETRY_CONFIG, - url: u, - data: body, - method: 'POST', - }); - const tokenResponse = res.data; - this.credentials.access_token = tokenResponse.accessToken; - this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); - return { - tokens: this.credentials, - res, - }; - } - catch (error) { - if (!(error instanceof Error)) - throw error; - let status = 0; - let message = ''; - if (error instanceof gaxios_1.GaxiosError) { - status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; - message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; - } - if (status && message) { - error.message = `${status}: unable to impersonate: ${message}`; - throw error; - } - else { - error.message = `unable to impersonate: ${error}`; - throw error; - } - } + encodeBase64StringUtf8(text) { + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; } /** - * Generates an OpenID Connect ID token for a service account. - * - * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} - * - * @param targetAudience the audience for the fetched ID token. - * @param options the for the request - * @return an OpenID Connect ID token + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. */ - async fetchIdToken(targetAudience, options) { - var _a; - await this.sourceClient.getAccessToken(); - const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; - const u = `${this.endpoint}/v1/${name}:generateIdToken`; - const body = { - delegates: this.delegates, - audience: targetAudience, - includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, - }; - const res = await this.sourceClient.request({ - ...Impersonated.RETRY_CONFIG, - url: u, - data: body, - method: 'POST', - }); - return res.data.token; + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. 
+ const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + // eslint-disable-next-line node/no-unsupported-features/node-builtins + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); } } -exports.Impersonated = Impersonated; +exports.BrowserCrypto = BrowserCrypto; /***/ }), -/***/ 8740: +/***/ 78043: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2015 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -27888,194 +59808,50 @@ exports.Impersonated = Impersonated; // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +/* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWTAccess = void 0; -const jws = __nccwpck_require__(4636); -const util_1 = __nccwpck_require__(8905); -const DEFAULT_HEADER = { - alg: 'RS256', - typ: 'JWT', -}; -class JWTAccess { - /** - * JWTAccess service account credentials. - * - * Create a new access token by using the credential to create a new JWT token - * that's recognized as the access token. - * - * @param email the service account email address. - * @param key the private key that will be used to sign the token. - * @param keyId the ID of the private key used to sign the token. - */ - constructor(email, key, keyId, eagerRefreshThresholdMillis) { - this.cache = new util_1.LRUCache({ - capacity: 500, - maxAge: 60 * 60 * 1000, - }); - this.email = email; - this.key = key; - this.keyId = keyId; - this.eagerRefreshThresholdMillis = - eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; - } - /** - * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url - * - * @param url The URI being authorized. - * @param scopes The scope or scopes being authorized - * @returns A string that returns the cached key. - */ - getCachedKey(url, scopes) { - let cacheKey = url; - if (scopes && Array.isArray(scopes) && scopes.length) { - cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; - } - else if (typeof scopes === 'string') { - cacheKey = url ? `${url}_${scopes}` : scopes; - } - if (!cacheKey) { - throw Error('Scopes or url must be provided'); - } - return cacheKey; - } - /** - * Get a non-expired access token, after refreshing if necessary. - * - * @param url The URI being authorized. - * @param additionalClaims An object with a set of additional claims to - * include in the payload. 
- * @returns An object that includes the authorization header. - */ - getRequestHeaders(url, additionalClaims, scopes) { - // Return cached authorization headers, unless we are within - // eagerRefreshThresholdMillis ms of them expiring: - const key = this.getCachedKey(url, scopes); - const cachedToken = this.cache.get(key); - const now = Date.now(); - if (cachedToken && - cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { - return cachedToken.headers; - } - const iat = Math.floor(Date.now() / 1000); - const exp = JWTAccess.getExpirationTime(iat); - let defaultClaims; - // Turn scopes into space-separated string - if (Array.isArray(scopes)) { - scopes = scopes.join(' '); - } - // If scopes are specified, sign with scopes - if (scopes) { - defaultClaims = { - iss: this.email, - sub: this.email, - scope: scopes, - exp, - iat, - }; - } - else { - defaultClaims = { - iss: this.email, - sub: this.email, - aud: url, - exp, - iat, - }; - } - // if additionalClaims are provided, ensure they do not collide with - // other required claims. - if (additionalClaims) { - for (const claim in defaultClaims) { - if (additionalClaims[claim]) { - throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); - } - } - } - const header = this.keyId - ? { ...DEFAULT_HEADER, kid: this.keyId } - : DEFAULT_HEADER; - const payload = Object.assign(defaultClaims, additionalClaims); - // Sign the jwt and add it to the cache - const signedJWT = jws.sign({ header, payload, secret: this.key }); - const headers = { Authorization: `Bearer ${signedJWT}` }; - this.cache.set(key, { - expiration: exp * 1000, - headers, - }); - return headers; - } - /** - * Returns an expiration time for the JWT token. - * - * @param iat The issued at time for the JWT. - * @returns An expiration time for the JWT. - */ - static getExpirationTime(iat) { - const exp = iat + 3600; // 3600 seconds = 1 hour - return exp; - } - /** - * Create a JWTAccess credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); - } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); - } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); - } - // Extract the relevant information from the json key file. 
- this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); - } - else { - return this.fromStreamAsync(inputStream); - } - } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - reject(new Error('Must pass in a stream containing the service account auth settings.')); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('data', chunk => (s += chunk)) - .on('error', reject) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); - } - catch (err) { - reject(err); - } - }); - }); +exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; +const crypto_1 = __nccwpck_require__(14693); +const crypto_2 = __nccwpck_require__(30757); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); } + return new crypto_2.NodeCrypto(); } -exports.JWTAccess = JWTAccess; +exports.createCrypto = createCrypto; +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +exports.hasBrowserCrypto = hasBrowserCrypto; +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} +exports.fromArrayBufferToHex = fromArrayBufferToHex; /***/ }), -/***/ 3959: +/***/ 30757: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2013 Google LLC +// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28089,281 +59865,155 @@ exports.JWTAccess = JWTAccess; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.JWT = void 0; -const gtoken_1 = __nccwpck_require__(6031); -const jwtaccess_1 = __nccwpck_require__(8740); -const oauth2client_1 = __nccwpck_require__(3936); -const authclient_1 = __nccwpck_require__(4627); -class JWT extends oauth2client_1.OAuth2Client { - constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { - const opts = optionsOrEmail && typeof optionsOrEmail === 'object' - ? optionsOrEmail - : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; - super(opts); - this.email = opts.email; - this.keyFile = opts.keyFile; - this.key = opts.key; - this.keyId = opts.keyId; - this.scopes = opts.scopes; - this.subject = opts.subject; - this.additionalClaims = opts.additionalClaims; - // Start with an expired refresh token, which will automatically be - // refreshed before the first API call is made. - this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; - } - /** - * Creates a copy of the credential with the specified scopes. - * @param scopes List of requested scopes or a single scope. - * @return The cloned instance. 
- */ - createScoped(scopes) { - const jwt = new JWT(this); - jwt.scopes = scopes; - return jwt; - } - /** - * Obtains the metadata to be sent with the request. - * - * @param url the URI being authorized. - */ - async getRequestMetadataAsync(url) { - url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; - const useSelfSignedJWT = (!this.hasUserScopes() && url) || - (this.useJWTAccessWithScope && this.hasAnyScopes()) || - this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; - if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { - throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); - } - if (!this.apiKey && useSelfSignedJWT) { - if (this.additionalClaims && - this.additionalClaims.target_audience) { - const { tokens } = await this.refreshToken(); - return { - headers: this.addSharedMetadataHeaders({ - Authorization: `Bearer ${tokens.id_token}`, - }), - }; - } - else { - // no scopes have been set, but a uri has been provided. Use JWTAccess - // credentials. - if (!this.access) { - this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); - } - let scopes; - if (this.hasUserScopes()) { - scopes = this.scopes; - } - else if (!url) { - scopes = this.defaultScopes; - } - const useScopes = this.useJWTAccessWithScope || - this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; - const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, - // Scopes take precedent over audience for signing, - // so we only provide them if `useJWTAccessWithScope` is on or - // if we are in a non-default universe - useScopes ? scopes : undefined); - return { headers: this.addSharedMetadataHeaders(headers) }; - } - } - else if (this.hasAnyScopes() || this.apiKey) { - return super.getRequestMetadataAsync(url); - } - else { - // If no audience, apiKey, or scopes are provided, we should not attempt - // to populate any headers: - return { headers: {} }; - } - } - /** - * Fetches an ID token. - * @param targetAudience the audience for the fetched ID token. - */ - async fetchIdToken(targetAudience) { - // Create a new gToken for fetching an ID token - const gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: { target_audience: targetAudience }, - transporter: this.transporter, - }); - await gtoken.getToken({ - forceRefresh: true, - }); - if (!gtoken.idToken) { - throw new Error('Unknown error: Failed to fetch ID token'); - } - return gtoken.idToken; - } - /** - * Determine if there are currently scopes available. - */ - hasUserScopes() { - if (!this.scopes) { - return false; - } - return this.scopes.length > 0; - } - /** - * Are there any default or user scopes defined. 
- */ - hasAnyScopes() { - if (this.scopes && this.scopes.length > 0) - return true; - if (this.defaultScopes && this.defaultScopes.length > 0) - return true; - return false; - } - authorize(callback) { - if (callback) { - this.authorizeAsync().then(r => callback(null, r), callback); - } - else { - return this.authorizeAsync(); - } - } - async authorizeAsync() { - const result = await this.refreshToken(); - if (!result) { - throw new Error('No result returned'); - } - this.credentials = result.tokens; - this.credentials.refresh_token = 'jwt-placeholder'; - this.key = this.gtoken.key; - this.email = this.gtoken.iss; - return result.tokens; +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6113); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); } - /** - * Refreshes the access token. - * @param refreshToken ignored - * @private - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - const gtoken = this.createGToken(); - const token = await gtoken.getToken({ - forceRefresh: this.isTokenExpiring(), - }); - const tokens = { - access_token: token.access_token, - token_type: 'Bearer', - expiry_date: gtoken.expiresAt, - id_token: gtoken.idToken, - }; - this.emit('tokens', tokens); - return { res: null, tokens }; + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); } - /** - * Create a gToken if it doesn't already exist. - */ - createGToken() { - if (!this.gtoken) { - this.gtoken = new gtoken_1.GoogleToken({ - iss: this.email, - sub: this.subject, - scope: this.scopes || this.defaultScopes, - keyFile: this.keyFile, - key: this.key, - additionalClaims: this.additionalClaims, - transporter: this.transporter, - }); - } - return this.gtoken; + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); } - /** - * Create a JWT credentials instance using the given input options. - * @param json The input object. - */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the service account auth settings.'); - } - if (!json.client_email) { - throw new Error('The incoming JSON object does not contain a client_email field'); - } - if (!json.private_key) { - throw new Error('The incoming JSON object does not contain a private_key field'); - } - // Extract the relevant information from the json key file. 
- this.email = json.client_email; - this.key = json.private_key; - this.keyId = json.private_key_id; - this.projectId = json.project_id; - this.quotaProjectId = json.quota_project_id; - this.universeDomain = json.universe_domain || this.universeDomain; + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); - } - else { - return this.fromStreamAsync(inputStream); - } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); } - fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - throw new Error('Must pass in a stream containing the service account auth settings.'); - } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - resolve(); - } - catch (e) { - reject(e); - } - }); - }); + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); } /** - * Creates a JWT credentials instance using an API Key for authentication. - * @param apiKey The API Key in string form. + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. */ - fromAPIKey(apiKey) { - if (typeof apiKey !== 'string') { - throw new Error('Must provide an API Key string.'); - } - this.apiKey = apiKey; + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); } /** - * Using the key or keyFile on the JWT client, obtain an object that contains - * the key and the client email. - */ - async getCredentials() { - if (this.key) { - return { private_key: this.key, client_email: this.email }; - } - else if (this.keyFile) { - const gtoken = this.createGToken(); - const creds = await gtoken.getCredentials(this.keyFile); - return { private_key: creds.privateKey, client_email: creds.clientEmail }; - } - throw new Error('A key or a keyFile must be provided to getCredentials.'); + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); } } -exports.JWT = JWT; +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. 
+ */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + + +/***/ }), + +/***/ 20810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(20695); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. +exports.gcpMetadata = __nccwpck_require__(3563); +exports.gaxios = __nccwpck_require__(59555); +var authclient_1 = __nccwpck_require__(44627); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(96875); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(21380); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(39735); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(80298); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(68740); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(13959); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(91103); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(3936); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); 
+Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +var loginticket_1 = __nccwpck_require__(74524); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(98790); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(71569); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var identitypoolclient_1 = __nccwpck_require__(20117); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(94381); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(40810); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(6270); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(44782); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +var passthrough_1 = __nccwpck_require__(32460); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(72649); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; /***/ }), -/***/ 4524: +/***/ 16608: /***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2014 Google LLC +// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -28377,53 +60027,31 @@ exports.JWT = JWT; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LoginTicket = void 0; -class LoginTicket { - /** - * Create a simple class to extract user ID from an ID Token - * - * @param {string} env Envelope of the jwt - * @param {TokenPayload} pay Payload of the jwt - * @constructor - */ - constructor(env, pay) { - this.envelope = env; - this.payload = pay; - } - getEnvelope() { - return this.envelope; - } - getPayload() { - return this.payload; - } - /** - * Create a simple class to extract user ID from an ID Token - * - * @return The user ID - */ - getUserId() { - const payload = this.getPayload(); - if (payload && payload.sub) { - return payload.sub; +exports.validate = void 0; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. 
This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); } - return null; - } - /** - * Returns attributes from the login ticket. This can contain - * various information about the user session. - * - * @return The envelope and payload - */ - getAttributes() { - return { envelope: this.getEnvelope(), payload: this.getPayload() }; } } -exports.LoginTicket = LoginTicket; +exports.validate = validate; /***/ }), -/***/ 3936: +/***/ 72649: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -28442,9995 +60070,9994 @@ exports.LoginTicket = LoginTicket; // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OAuth2Client = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const stream = __nccwpck_require__(2781); -const formatEcdsa = __nccwpck_require__(1728); -const crypto_1 = __nccwpck_require__(8043); -const authclient_1 = __nccwpck_require__(4627); -const loginticket_1 = __nccwpck_require__(4524); -var CodeChallengeMethod; -(function (CodeChallengeMethod) { - CodeChallengeMethod["Plain"] = "plain"; - CodeChallengeMethod["S256"] = "S256"; -})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); -var CertificateFormat; -(function (CertificateFormat) { - CertificateFormat["PEM"] = "PEM"; - CertificateFormat["JWK"] = "JWK"; -})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); -class OAuth2Client extends authclient_1.AuthClient { - constructor(optionsOrClientId, clientSecret, redirectUri) { - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { clientId: optionsOrClientId, clientSecret, redirectUri }; - super(opts); - this.certificateCache = {}; - this.certificateExpiry = null; - this.certificateCacheFormat = CertificateFormat.PEM; - this.refreshTokenPromises = new Map(); - this._clientId = opts.clientId; - this._clientSecret = opts.clientSecret; - this.redirectUri = opts.redirectUri; - this.endpoints = { - tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', - oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', - oauth2TokenUrl: 'https://oauth2.googleapis.com/token', - oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', - oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', - oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', - oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', - ...opts.endpoints, - }; - this.issuers = opts.issuers || [ - 'accounts.google.com', - 'https://accounts.google.com', - this.universeDomain, - ]; - } - /** - * Generates URL for consent page landing. - * @param opts Options. - * @return URL to consent page. 
- */ - generateAuthUrl(opts = {}) { - if (opts.code_challenge_method && !opts.code_challenge) { - throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); - } - opts.response_type = opts.response_type || 'code'; - opts.client_id = opts.client_id || this._clientId; - opts.redirect_uri = opts.redirect_uri || this.redirectUri; - // Allow scopes to be passed either as array or a string - if (Array.isArray(opts.scope)) { - opts.scope = opts.scope.join(' '); - } - const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); - return (rootUrl + - '?' + - querystring.stringify(opts)); - } - generateCodeVerifier() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); - } - /** - * Convenience method to automatically generate a code_verifier, and its - * resulting SHA256. If used, this must be paired with a S256 - * code_challenge_method. - * - * For a full example see: - * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js - */ - async generateCodeVerifierAsync() { - // base64 encoding uses 6 bits per character, and we want to generate128 - // characters. 6*128/8 = 96. - const crypto = (0, crypto_1.createCrypto)(); - const randomString = crypto.randomBytesBase64(96); - // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ - // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just - // swapping out a few chars. - const codeVerifier = randomString - .replace(/\+/g, '~') - .replace(/=/g, '_') - .replace(/\//g, '-'); - // Generate the base64 encoded SHA256 - const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); - // We need to use base64UrlEncoding instead of standard base64 - const codeChallenge = unencodedCodeChallenge - .split('=')[0] - .replace(/\+/g, '-') - .replace(/\//g, '_'); - return { codeVerifier, codeChallenge }; - } - getToken(codeOrOptions, callback) { - const options = typeof codeOrOptions === 'string' ? { code: codeOrOptions } : codeOrOptions; - if (callback) { - this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); - } - else { - return this.getTokenAsync(options); - } - } - async getTokenAsync(options) { - const url = this.endpoints.oauth2TokenUrl.toString(); - const values = { - code: options.code, - client_id: options.client_id || this._clientId, - client_secret: this._clientSecret, - redirect_uri: options.redirect_uri || this.redirectUri, - grant_type: 'authorization_code', - code_verifier: options.codeVerifier, - }; - const res = await this.transporter.request({ - ...OAuth2Client.RETRY_CONFIG, - method: 'POST', - url, - data: querystring.stringify(values), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - const tokens = res.data; - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(59555); +const options_1 = __nccwpck_require__(16608); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(51402); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. 
+ */ + this.instance = new gaxios_1.Gaxios(); } /** - * Refreshes the access token. - * @param refresh_token Existing refresh token. - * @private + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. */ - async refreshToken(refreshToken) { - if (!refreshToken) { - return this.refreshTokenNoCache(refreshToken); - } - // If a request to refresh using the same token has started, - // return the same promise. - if (this.refreshTokenPromises.has(refreshToken)) { - return this.refreshTokenPromises.get(refreshToken); - } - const p = this.refreshTokenNoCache(refreshToken).then(r => { - this.refreshTokenPromises.delete(refreshToken); - return r; - }, e => { - this.refreshTokenPromises.delete(refreshToken); - throw e; - }); - this.refreshTokenPromises.set(refreshToken, p); - return p; - } - async refreshTokenNoCache(refreshToken) { - var _a; - if (!refreshToken) { - throw new Error('No refresh token is set.'); - } - const url = this.endpoints.oauth2TokenUrl.toString(); - const data = { - refresh_token: refreshToken, - client_id: this._clientId, - client_secret: this._clientSecret, - grant_type: 'refresh_token', - }; - let res; - try { - // request for new token - res = await this.transporter.request({ - ...OAuth2Client.RETRY_CONFIG, - method: 'POST', - url, - data: querystring.stringify(data), - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - }); - } - catch (e) { - if (e instanceof gaxios_1.GaxiosError && - e.message === 'invalid_grant' && - ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) && - /ReAuth/i.test(e.response.data.error_description)) { - e.message = JSON.stringify(e.response.data); - } - throw e; - } - const tokens = res.data; - // TODO: de-duplicate this code from a few spots - if (res.data && res.data.expires_in) { - tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; - delete tokens.expires_in; - } - this.emit('tokens', tokens); - return { tokens, res }; - } - refreshAccessToken(callback) { - if (callback) { - this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); - } - else { - return this.refreshAccessTokenAsync(); - } - } - async refreshAccessTokenAsync() { - const r = await this.refreshToken(this.credentials.refresh_token); - const tokens = r.tokens; - tokens.refresh_token = this.credentials.refresh_token; - this.credentials = tokens; - return { credentials: this.credentials, res: r.res }; - } - getAccessToken(callback) { - if (callback) { - this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); - } - else { - return this.getAccessTokenAsync(); - } - } - async getAccessTokenAsync() { - const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); - if (shouldRefresh) { - if (!this.credentials.refresh_token) { - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - return { token: this.credentials.access_token }; - } - } - else { - throw new Error('No refresh token or refresh handler callback is set.'); - } - } - const r = await this.refreshAccessTokenAsync(); - if (!r.credentials || (r.credentials && !r.credentials.access_token)) { - throw new Error('Could not refresh access token.'); + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; } - return { token: r.credentials.access_token, res: r.res }; - } - else { - return { token: this.credentials.access_token }; - } - } - /** - * The main authentication interface. It takes an optional url which when - * present is the endpoint being accessed, and returns a Promise which - * resolves with authorization header fields. - * - * In OAuth2Client, the result has the form: - * { Authorization: 'Bearer ' } - * @param url The optional url being authorized - */ - async getRequestHeaders(url) { - const headers = (await this.getRequestMetadataAsync(url)).headers; - return headers; - } - async getRequestMetadataAsync( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - url) { - const thisCreds = this.credentials; - if (!thisCreds.access_token && - !thisCreds.refresh_token && - !this.apiKey && - !this.refreshHandler) { - throw new Error('No access, refresh token, API key or refresh handler callback is set.'); - } - if (thisCreds.access_token && !this.isTokenExpiring()) { - thisCreds.token_type = thisCreds.token_type || 'Bearer'; - const headers = { - Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; - } - // If refreshHandler exists, call processAndValidateRefreshHandler(). - if (this.refreshHandler) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - const headers = { - Authorization: 'Bearer ' + this.credentials.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers) }; + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; } - } - if (this.apiKey) { - return { headers: { 'X-Goog-Api-Key': this.apiKey } }; - } - let r = null; - let tokens = null; - try { - r = await this.refreshToken(thisCreds.refresh_token); - tokens = r.tokens; - } - catch (err) { - const e = err; - if (e.response && - (e.response.status === 403 || e.response.status === 404)) { - e.message = `Could not refresh access token: ${e.message}`; + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; } - throw e; } - const credentials = this.credentials; - credentials.token_type = credentials.token_type || 'Bearer'; - tokens.refresh_token = credentials.refresh_token; - this.credentials = tokens; - const headers = { - Authorization: credentials.token_type + ' ' + tokens.access_token, - }; - return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; - } - /** - * Generates an URL to revoke the given token. - * @param token The existing token to be revoked. - * - * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} - */ - static getRevokeTokenUrl(token) { - return new OAuth2Client().getRevokeTokenURL(token).toString(); + return opts; } /** - * Generates a URL to revoke the given token. - * - * @param token The existing token to be revoked. + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. 
*/ - getRevokeTokenURL(token) { - const url = new URL(this.endpoints.oauth2RevokeUrl); - url.searchParams.append('token', token); - return url; - } - revokeToken(token, callback) { - const opts = { - ...OAuth2Client.RETRY_CONFIG, - url: this.getRevokeTokenURL(token).toString(), - method: 'POST', - }; - if (callback) { - this.transporter - .request(opts) - .then(r => callback(null, r), callback); - } - else { - return this.transporter.request(opts); - } - } - revokeCredentials(callback) { - if (callback) { - this.revokeCredentialsAsync().then(res => callback(null, res), callback); - } - else { - return this.revokeCredentialsAsync(); - } + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); } - async revokeCredentialsAsync() { - const token = this.credentials.access_token; - this.credentials = {}; - if (token) { - return this.revokeToken(token); - } - else { - throw new Error('No access token to revoke.'); - } + get defaults() { + return this.instance.defaults; } - request(opts, callback) { - if (callback) { - this.requestAsync(opts).then(r => callback(null, r), e => { - return callback(e, e.response); - }); - } - else { - return this.requestAsync(opts); - } + set defaults(opts) { + this.instance.defaults = opts; } - async requestAsync(opts, reAuthRetried = false) { - let r2; - try { - const r = await this.getRequestMetadataAsync(opts.url); - opts.headers = opts.headers || {}; - if (r.headers && r.headers['x-goog-user-project']) { - opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; } - if (r.headers && r.headers.Authorization) { - opts.headers.Authorization = r.headers.Authorization; + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; } - if (this.apiKey) { - opts.headers['X-Goog-Api-Key'] = this.apiKey; + else { + err.message = body.error.message; + err.code = body.error.code; } - r2 = await this.transporter.request(opts); } - catch (e) { - const res = e.response; - if (res) { - const statusCode = res.status; - // Retry the request for metadata if the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. - // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - An access_token and refresh_token were available, but either no - // expiry_date was available or the forceRefreshOnFailure flag is set. - // The absent expiry_date case can happen when developers stash the - // access_token and refresh_token for later use, but the access_token - // fails on the first try because it's expired. Some developers may - // choose to enable forceRefreshOnFailure to mitigate time-related - // errors. - // Or the following criteria are true: - // - We haven't already retried. It only makes sense to retry once. 
- // - The response was a 401 or a 403 - // - The request didn't send a readableStream - // - No refresh_token was available - // - An access_token and a refreshHandler callback were available, but - // either no expiry_date was available or the forceRefreshOnFailure - // flag is set. The access_token fails on the first try because it's - // expired. Some developers may choose to enable forceRefreshOnFailure - // to mitigate time-related errors. - const mayRequireRefresh = this.credentials && - this.credentials.access_token && - this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure); - const mayRequireRefreshWithNoRefreshToken = this.credentials && - this.credentials.access_token && - !this.credentials.refresh_token && - (!this.credentials.expiry_date || this.forceRefreshOnFailure) && - this.refreshHandler; - const isReadableStream = res.config.data instanceof stream.Readable; - const isAuthErr = statusCode === 401 || statusCode === 403; - if (!reAuthRetried && - isAuthErr && - !isReadableStream && - mayRequireRefresh) { - await this.refreshAccessTokenAsync(); - return this.requestAsync(opts, true); - } - else if (!reAuthRetried && - isAuthErr && - !isReadableStream && - mayRequireRefreshWithNoRefreshToken) { - const refreshedAccessToken = await this.processAndValidateRefreshHandler(); - if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { - this.setCredentials(refreshedAccessToken); - } - return this.requestAsync(opts, true); - } - } - throw e; + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; } - return r2; + return err; } - verifyIdToken(options, callback) { - // This function used to accept two arguments instead of an options object. - // Check the types to help users upgrade with less pain. - // This check can be removed after a 2.0 release. - if (callback && typeof callback !== 'function') { - throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); - } - if (callback) { - this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); - } - else { - return this.verifyIdTokenAsync(options); - } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + + +/***/ }), + +/***/ 68905: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +exports.snakeToCamel = snakeToCamel; +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; } - async verifyIdTokenAsync(options) { - if (!options.idToken) { - throw new Error('The verifyIdToken method requires an ID Token'); - } - const response = await this.getFederatedSignonCertsAsync(); - const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); - return login; + return { get }; +} +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; } /** - * Obtains information about the provisioned access token. Especially useful - * if you want to check the scopes that were provisioned to a given token. + * Add an item to the cache. * - * @param accessToken Required. The Access Token for which you want to get - * user info. 
+ * @param key the key to upsert + * @param value the value of the key */ - async getTokenInfo(accessToken) { - const { data } = await this.transporter.request({ - ...OAuth2Client.RETRY_CONFIG, - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - Authorization: `Bearer ${accessToken}`, - }, - url: this.endpoints.tokenInfoUrl.toString(), - }); - const info = Object.assign({ - expiry_date: new Date().getTime() + data.expires_in * 1000, - scopes: data.scope.split(' '), - }, data); - delete info.expires_in; - delete info.scope; - return info; + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); } - getFederatedSignonCerts(callback) { - if (callback) { - this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); - } - else { - return this.getFederatedSignonCertsAsync(); - } + /** + * Get an item from the cache. + * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; } - async getFederatedSignonCertsAsync() { - const nowTime = new Date().getTime(); - const format = (0, crypto_1.hasBrowserCrypto)() - ? CertificateFormat.JWK - : CertificateFormat.PEM; - if (this.certificateExpiry && - nowTime < this.certificateExpiry.getTime() && - this.certificateCacheFormat === format) { - return { certs: this.certificateCache, format }; - } - let res; - let url; - switch (format) { - case CertificateFormat.PEM: - url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); - break; - case CertificateFormat.JWK: - url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - try { - res = await this.transporter.request({ - ...OAuth2Client.RETRY_CONFIG, - url, - }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; - } - throw e; - } - const cacheControl = res ? res.headers['cache-control'] : undefined; - let cacheAge = -1; - if (cacheControl) { - const pattern = new RegExp('max-age=([0-9]*)'); - const regexResult = pattern.exec(cacheControl); - if (regexResult && regexResult.length === 2) { - // Cache results with max-age (in seconds) - cacheAge = Number(regexResult[1]) * 1000; // milliseconds - } - } - let certificates = {}; - switch (format) { - case CertificateFormat.PEM: - certificates = res.data; - break; - case CertificateFormat.JWK: - for (const key of res.data.keys) { - certificates[key.kid] = key; - } - break; - default: - throw new Error(`Unsupported certificate format ${format}`); - } - const now = new Date(); - this.certificateExpiry = - cacheAge === -1 ? 
null : new Date(now.getTime() + cacheAge); - this.certificateCache = certificates; - this.certificateCacheFormat = format; - return { certs: certificates, format, res }; +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); } - getIapPublicKeys(callback) { - if (callback) { - this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); - } - else { - return this.getIapPublicKeysAsync(); - } +}; + + +/***/ }), + +/***/ 76031: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(57147); +const gaxios_1 = __nccwpck_require__(59555); +const jws = __nccwpck_require__(4636); +const path = __nccwpck_require__(71017); +const util_1 = __nccwpck_require__(73837); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. 
+ throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; } - async getIapPublicKeysAsync() { - let res; - const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); - try { - res = await this.transporter.request({ - ...OAuth2Client.RETRY_CONFIG, - url, - }); - } - catch (e) { - if (e instanceof Error) { - e.message = `Failed to retrieve verification certificates: ${e.message}`; - } - throw e; - } - return { pubkeys: res.data, res }; +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; } - verifySignedJwtWithCerts() { - // To make the code compatible with browser SubtleCrypto we need to make - // this method async. - throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; } /** - * Verify the id token is signed with the correct certificate - * and is from the correct audience. - * @param jwt The jwt to verify (The ID Token in this case). - * @param certs The array of certs to test the jwt against. - * @param requiredAudience The audience to test the jwt against. - * @param issuers The allowed issuers of the jwt (Optional). - * @param maxExpiry The max expiry the certificate can be (Optional). - * @return Returns a promise resolving to LoginTicket on verification. + * Create a GoogleToken. + * + * @param options Configuration object. */ - async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { - const crypto = (0, crypto_1.createCrypto)(); - if (!maxExpiry) { - maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; - } - const segments = jwt.split('.'); - if (segments.length !== 3) { - throw new Error('Wrong number of segments in token: ' + jwt); - } - const signed = segments[0] + '.' 
+ segments[1]; - let signature = segments[2]; - let envelope; - let payload; - try { - envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; - } - throw err; - } - if (!envelope) { - throw new Error("Can't parse token envelope: " + segments[0]); - } - try { - payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); - } - catch (err) { - if (err instanceof Error) { - err.message = `Can't parse token payload '${segments[0]}`; - } - throw err; - } - if (!payload) { - throw new Error("Can't parse token payload: " + segments[1]); - } - if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { - // If this is not present, then there's no reason to attempt verification - throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); - } - const cert = certs[envelope.kid]; - if (envelope.alg === 'ES256') { - signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); - } - const verified = await crypto.verify(cert, signed, signature); - if (!verified) { - throw new Error('Invalid token signature: ' + jwt); - } - if (!payload.iat) { - throw new Error('No issue time in token: ' + JSON.stringify(payload)); - } - if (!payload.exp) { - throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. + * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; } - const iat = Number(payload.iat); - if (isNaN(iat)) - throw new Error('iat field using invalid format'); - const exp = Number(payload.exp); - if (isNaN(exp)) - throw new Error('exp field using invalid format'); - const now = new Date().getTime() / 1000; - if (exp >= now + maxExpiry) { - throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + else { + return true; } - const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; - const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; - if (now < earliest) { - throw new Error('Token used too early, ' + - now + - ' < ' + - earliest + - ': ' + - JSON.stringify(payload)); + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? 
_a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; } - if (now > latest) { - throw new Error('Token used too late, ' + - now + - ' > ' + - latest + - ': ' + - JSON.stringify(payload)); + else { + return true; } - if (issuers && issuers.indexOf(payload.iss) < 0) { - throw new Error('Invalid issuer, expected one of [' + - issuers + - '], but got ' + - payload.iss); + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; } - // Check the audience matches if we have one - if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { - const aud = payload.aud; - let audVerified = false; - // If the requiredAudience is an array, check if it contains token - // audience - if (requiredAudience.constructor === Array) { - audVerified = requiredAudience.indexOf(aud) > -1; - } - else { - audVerified = aud === requiredAudience; - } - if (!audVerified) { - throw new Error('Wrong recipient, payload audience != requiredAudience'); - } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; } - return new loginticket_1.LoginTicket(envelope, payload); + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); } /** - * Returns a promise that resolves with AccessTokenResponse type if - * refreshHandler is defined. - * If not, nothing is returned. + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties */ - async processAndValidateRefreshHandler() { - if (this.refreshHandler) { - const accessTokenResponse = await this.refreshHandler(); - if (!accessTokenResponse.access_token) { - throw new Error('No access token is returned by the refreshHandler callback.'); + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; } - return accessTokenResponse; + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. ' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); } - return; } - /** - * Returns true if a token is expired or will expire within - * eagerRefreshThresholdMillismilliseconds. - * If there is no expiry time, assumes the token is not expired or expiring. - */ - isTokenExpiring() { - const expiryDate = this.credentials.expiry_date; - return expiryDate - ? 
expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis - : false; + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); } } -exports.OAuth2Client = OAuth2Client; -/** - * @deprecated use instance's {@link OAuth2Client.endpoints} - */ -OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; -/** - * Clock skew - five minutes in seconds - */ -OAuth2Client.CLOCK_SKEW_SECS_ = 300; +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = /** - * The default max Token Lifetime is one day in seconds + * Request the token from Google. 
*/ -OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 31621: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; /***/ }), -/***/ 9510: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 52589: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; +var __assign=this&&this.__assign||function(){__assign=Object.assign||function(t){for(var s,i=1,n=arguments.length;i'"&]/g,nonAscii:/[<>'"&\u0080-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintable:/[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintableOnly:/[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,extensive:/[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g};var defaultEncodeOptions={mode:"specialChars",level:"all",numeric:"decimal"};function encode(text,_a){var _b=_a===void 0?defaultEncodeOptions:_a,_c=_b.mode,mode=_c===void 0?"specialChars":_c,_d=_b.numeric,numeric=_d===void 0?"decimal":_d,_e=_b.level,level=_e===void 0?"all":_e;if(!text){return""}var encodeRegExp=encodeRegExps[mode];var references=allNamedReferences[level].characters;var isHex=numeric==="hexadecimal";return replaceUsingRegExp(text,encodeRegExp,(function(input){var result=references[input];if(!result){var 
code=input.length>1?surrogate_pairs_1.getCodePoint(input,0):input.charCodeAt(0);result=(isHex?"&#x"+code.toString(16):"&#"+code)+";"}return result}))}exports.encode=encode;var defaultDecodeOptions={scope:"body",level:"all"};var strict=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g;var attribute=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g;var baseDecodeRegExps={xml:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.xml},html4:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html4},html5:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html5}};var decodeRegExps=__assign(__assign({},baseDecodeRegExps),{all:baseDecodeRegExps.html5});var fromCharCode=String.fromCharCode;var outOfBoundsChar=fromCharCode(65533);var defaultDecodeEntityOptions={level:"all"};function getDecodedEntity(entity,references,isAttribute,isStrict){var decodeResult=entity;var decodeEntityLastChar=entity[entity.length-1];if(isAttribute&&decodeEntityLastChar==="="){decodeResult=entity}else if(isStrict&&decodeEntityLastChar!==";"){decodeResult=entity}else{var decodeResultByReference=references[entity];if(decodeResultByReference){decodeResult=decodeResultByReference}else if(entity[0]==="&"&&entity[1]==="#"){var decodeSecondChar=entity[2];var decodeCode=decodeSecondChar=="x"||decodeSecondChar=="X"?parseInt(entity.substr(3),16):parseInt(entity.substr(2));decodeResult=decodeCode>=1114111?outOfBoundsChar:decodeCode>65535?surrogate_pairs_1.fromCodePoint(decodeCode):fromCharCode(numeric_unicode_map_1.numericUnicodeMap[decodeCode]||decodeCode)}}return decodeResult}function decodeEntity(entity,_a){var _b=(_a===void 0?defaultDecodeEntityOptions:_a).level,level=_b===void 0?"all":_b;if(!entity){return""}return getDecodedEntity(entity,allNamedReferences[level].entities,false,false)}exports.decodeEntity=decodeEntity;function decode(text,_a){var _b=_a===void 0?defaultDecodeOptions:_a,_c=_b.level,level=_c===void 0?"all":_c,_d=_b.scope,scope=_d===void 0?level==="xml"?"strict":"body":_d;if(!text){return""}var decodeRegExp=decodeRegExps[level][scope];var references=allNamedReferences[level].entities;var isAttribute=scope==="attribute";var isStrict=scope==="strict";return replaceUsingRegExp(text,decodeRegExp,(function(entity){return getDecodedEntity(entity,references,isAttribute,isStrict)}))}exports.decode=decode; +//# sourceMappingURL=./index.js.map + +/***/ }), + +/***/ 6068: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +Object.defineProperty(exports, "__esModule", 
({value:true}));exports.bodyRegExps={xml:/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g,html4:/∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g,html5:/·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g};exports.namedReferences={xml:{entities:{"<":"<",">":">",""":'"',"'":"'","&":"&"},characters:{"<":"<",">":">",'"':""","'":"'","&":"&"}},html4:{entities:{"'":"'"," ":" "," ":" ","¡":"¡","¡":"¡","¢":"¢","¢":"¢","£":"£","£":"£","¤":"¤","¤":"¤","¥":"¥","¥":"¥","¦":"¦","¦":"¦","§":"§","§":"§","¨":"¨","¨":"¨","©":"©","©":"©","ª":"ª","ª":"ª","«":"«","«":"«","¬":"¬","¬":"¬","­":"­","­":"­","®":"®","®":"®","¯":"¯","¯":"¯","°":"°","°":"°","±":"±","±":"±","²":"²","²":"²","³":"³","³":"³","´":"´","´":"´","µ":"µ","µ":"µ","¶":"¶","¶":"¶","·":"·","·":"·","¸":"¸","¸":"¸","¹":"¹","¹":"¹","º":"º","º":"º","»":"»","»":"»","¼":"¼","¼":"¼","½":"½","½":"½","¾":"¾","¾":"¾","¿":"¿","¿":"¿","À":"À","À":"À","Á":"Á","Á":"Á","Â":"Â","Â":"Â","Ã":"Ã","Ã":"Ã","Ä":"Ä","Ä":"Ä","Å":"Å","Å":"Å","Æ":"Æ","Æ":"Æ","Ç":"Ç","Ç":"Ç","È":"È","È":"È","É":"É","É":"É","Ê":"Ê","Ê":"Ê","Ë":"Ë","Ë":"Ë","Ì":"Ì","Ì":"Ì","Í":"Í","Í":"Í","Î":"Î","Î":"Î","Ï":"Ï","Ï":"Ï","Ð":"Ð","Ð":"Ð","Ñ":"Ñ","Ñ":"Ñ","Ò":"Ò","Ò":"Ò","Ó":"Ó","Ó":"Ó","Ô":"Ô","Ô":"Ô","Õ":"Õ","Õ":"Õ","Ö":"Ö","Ö":"Ö","×":"×","×":"×","Ø":"Ø","Ø":"Ø","Ù":"Ù","Ù":"Ù","Ú":"Ú","Ú":"Ú","Û":"Û","Û":"Û","Ü":"Ü","Ü":"Ü","Ý":"Ý","Ý":"Ý","Þ":"Þ","Þ":"Þ","ß":"ß","ß":"ß","à":"à","à":"à","á":"á","á":"á","â":"â","â":"â","ã":"ã","ã":"ã","ä":"ä","ä":"ä","å":"å","å":"å","æ":"æ","æ":"æ","ç":"ç","ç":"ç","è":"è","è":"è","é":"é","é":"é","ê":"ê","ê":"ê","ë":"ë","ë":"ë","ì":"ì","ì":"ì","í":"í","í":"í","î":"î","î":"î","ï":"ï","ï":"ï","ð":"ð","ð":"ð","ñ":"ñ","ñ":"ñ","ò":"ò","ò":"ò","ó":"ó","ó":"ó","ô":"ô","ô":"ô","õ":"õ","õ":"õ","ö":"ö","ö":"ö","÷":"÷","÷":"÷","ø":"ø","ø":"ø","ù":"ù","ù":"ù","ú":"ú","ú":"ú","û":"û","û":"û","ü":"ü","ü":"ü","ý":"ý","ý":"ý","þ":"þ","þ":"þ","ÿ":"ÿ","ÿ":"ÿ",""":'"',""":'"',"&":"&","&":"&","<":"<","<":"<",">":">",">":">","Œ":"Œ","œ":"œ","Š":"Š","š":"š","Ÿ":"Ÿ","ˆ":"ˆ","˜":"˜"," ":" "," ":" "," ":" 
","‌":"‌","‍":"‍","‎":"‎","‏":"‏","–":"–","—":"—","‘":"‘","’":"’","‚":"‚","“":"“","”":"”","„":"„","†":"†","‡":"‡","‰":"‰","‹":"‹","›":"›","€":"€","ƒ":"ƒ","Α":"Α","Β":"Β","Γ":"Γ","Δ":"Δ","Ε":"Ε","Ζ":"Ζ","Η":"Η","Θ":"Θ","Ι":"Ι","Κ":"Κ","Λ":"Λ","Μ":"Μ","Ν":"Ν","Ξ":"Ξ","Ο":"Ο","Π":"Π","Ρ":"Ρ","Σ":"Σ","Τ":"Τ","Υ":"Υ","Φ":"Φ","Χ":"Χ","Ψ":"Ψ","Ω":"Ω","α":"α","β":"β","γ":"γ","δ":"δ","ε":"ε","ζ":"ζ","η":"η","θ":"θ","ι":"ι","κ":"κ","λ":"λ","μ":"μ","ν":"ν","ξ":"ξ","ο":"ο","π":"π","ρ":"ρ","ς":"ς","σ":"σ","τ":"τ","υ":"υ","φ":"φ","χ":"χ","ψ":"ψ","ω":"ω","ϑ":"ϑ","ϒ":"ϒ","ϖ":"ϖ","•":"•","…":"…","′":"′","″":"″","‾":"‾","⁄":"⁄","℘":"℘","ℑ":"ℑ","ℜ":"ℜ","™":"™","ℵ":"ℵ","←":"←","↑":"↑","→":"→","↓":"↓","↔":"↔","↵":"↵","⇐":"⇐","⇑":"⇑","⇒":"⇒","⇓":"⇓","⇔":"⇔","∀":"∀","∂":"∂","∃":"∃","∅":"∅","∇":"∇","∈":"∈","∉":"∉","∋":"∋","∏":"∏","∑":"∑","−":"−","∗":"∗","√":"√","∝":"∝","∞":"∞","∠":"∠","∧":"∧","∨":"∨","∩":"∩","∪":"∪","∫":"∫","∴":"∴","∼":"∼","≅":"≅","≈":"≈","≠":"≠","≡":"≡","≤":"≤","≥":"≥","⊂":"⊂","⊃":"⊃","⊄":"⊄","⊆":"⊆","⊇":"⊇","⊕":"⊕","⊗":"⊗","⊥":"⊥","⋅":"⋅","⌈":"⌈","⌉":"⌉","⌊":"⌊","⌋":"⌋","⟨":"〈","⟩":"〉","◊":"◊","♠":"♠","♣":"♣","♥":"♥","♦":"♦"},characters:{"'":"'"," ":" ","¡":"¡","¢":"¢","£":"£","¤":"¤","¥":"¥","¦":"¦","§":"§","¨":"¨","©":"©","ª":"ª","«":"«","¬":"¬","­":"­","®":"®","¯":"¯","°":"°","±":"±","²":"²","³":"³","´":"´","µ":"µ","¶":"¶","·":"·","¸":"¸","¹":"¹","º":"º","»":"»","¼":"¼","½":"½","¾":"¾","¿":"¿","À":"À","Á":"Á","Â":"Â","Ã":"Ã","Ä":"Ä","Å":"Å","Æ":"Æ","Ç":"Ç","È":"È","É":"É","Ê":"Ê","Ë":"Ë","Ì":"Ì","Í":"Í","Î":"Î","Ï":"Ï","Ð":"Ð","Ñ":"Ñ","Ò":"Ò","Ó":"Ó","Ô":"Ô","Õ":"Õ","Ö":"Ö","×":"×","Ø":"Ø","Ù":"Ù","Ú":"Ú","Û":"Û","Ü":"Ü","Ý":"Ý","Þ":"Þ","ß":"ß","à":"à","á":"á","â":"â","ã":"ã","ä":"ä","å":"å","æ":"æ","ç":"ç","è":"è","é":"é","ê":"ê","ë":"ë","ì":"ì","í":"í","î":"î","ï":"ï","ð":"ð","ñ":"ñ","ò":"ò","ó":"ó","ô":"ô","õ":"õ","ö":"ö","÷":"÷","ø":"ø","ù":"ù","ú":"ú","û":"û","ü":"ü","ý":"ý","þ":"þ","ÿ":"ÿ",'"':""","&":"&","<":"<",">":">","Œ":"Œ","œ":"œ","Š":"Š","š":"š","Ÿ":"Ÿ","ˆ":"ˆ","˜":"˜"," ":" "," ":" "," ":" 
","‌":"‌","‍":"‍","‎":"‎","‏":"‏","–":"–","—":"—","‘":"‘","’":"’","‚":"‚","“":"“","”":"”","„":"„","†":"†","‡":"‡","‰":"‰","‹":"‹","›":"›","€":"€","ƒ":"ƒ","Α":"Α","Β":"Β","Γ":"Γ","Δ":"Δ","Ε":"Ε","Ζ":"Ζ","Η":"Η","Θ":"Θ","Ι":"Ι","Κ":"Κ","Λ":"Λ","Μ":"Μ","Ν":"Ν","Ξ":"Ξ","Ο":"Ο","Π":"Π","Ρ":"Ρ","Σ":"Σ","Τ":"Τ","Υ":"Υ","Φ":"Φ","Χ":"Χ","Ψ":"Ψ","Ω":"Ω","α":"α","β":"β","γ":"γ","δ":"δ","ε":"ε","ζ":"ζ","η":"η","θ":"θ","ι":"ι","κ":"κ","λ":"λ","μ":"μ","ν":"ν","ξ":"ξ","ο":"ο","π":"π","ρ":"ρ","ς":"ς","σ":"σ","τ":"τ","υ":"υ","φ":"φ","χ":"χ","ψ":"ψ","ω":"ω","ϑ":"ϑ","ϒ":"ϒ","ϖ":"ϖ","•":"•","…":"…","′":"′","″":"″","‾":"‾","⁄":"⁄","℘":"℘","ℑ":"ℑ","ℜ":"ℜ","™":"™","ℵ":"ℵ","←":"←","↑":"↑","→":"→","↓":"↓","↔":"↔","↵":"↵","⇐":"⇐","⇑":"⇑","⇒":"⇒","⇓":"⇓","⇔":"⇔","∀":"∀","∂":"∂","∃":"∃","∅":"∅","∇":"∇","∈":"∈","∉":"∉","∋":"∋","∏":"∏","∑":"∑","−":"−","∗":"∗","√":"√","∝":"∝","∞":"∞","∠":"∠","∧":"∧","∨":"∨","∩":"∩","∪":"∪","∫":"∫","∴":"∴","∼":"∼","≅":"≅","≈":"≈","≠":"≠","≡":"≡","≤":"≤","≥":"≥","⊂":"⊂","⊃":"⊃","⊄":"⊄","⊆":"⊆","⊇":"⊇","⊕":"⊕","⊗":"⊗","⊥":"⊥","⋅":"⋅","⌈":"⌈","⌉":"⌉","⌊":"⌊","⌋":"⌋","〈":"⟨","〉":"⟩","◊":"◊","♠":"♠","♣":"♣","♥":"♥","♦":"♦"}},html5:{entities:{"Æ":"Æ","Æ":"Æ","&":"&","&":"&","Á":"Á","Á":"Á","Ă":"Ă","Â":"Â","Â":"Â","А":"А","𝔄":"𝔄","À":"À","À":"À","Α":"Α","Ā":"Ā","⩓":"⩓","Ą":"Ą","𝔸":"𝔸","⁡":"⁡","Å":"Å","Å":"Å","𝒜":"𝒜","≔":"≔","Ã":"Ã","Ã":"Ã","Ä":"Ä","Ä":"Ä","∖":"∖","⫧":"⫧","⌆":"⌆","Б":"Б","∵":"∵","ℬ":"ℬ","Β":"Β","𝔅":"𝔅","𝔹":"𝔹","˘":"˘","ℬ":"ℬ","≎":"≎","Ч":"Ч","©":"©","©":"©","Ć":"Ć","⋒":"⋒","ⅅ":"ⅅ","ℭ":"ℭ","Č":"Č","Ç":"Ç","Ç":"Ç","Ĉ":"Ĉ","∰":"∰","Ċ":"Ċ","¸":"¸","·":"·","ℭ":"ℭ","Χ":"Χ","⊙":"⊙","⊖":"⊖","⊕":"⊕","⊗":"⊗","∲":"∲","”":"”","’":"’","∷":"∷","⩴":"⩴","≡":"≡","∯":"∯","∮":"∮","ℂ":"ℂ","∐":"∐","∳":"∳","⨯":"⨯","𝒞":"𝒞","⋓":"⋓","≍":"≍","ⅅ":"ⅅ","⤑":"⤑","Ђ":"Ђ","Ѕ":"Ѕ","Џ":"Џ","‡":"‡","↡":"↡","⫤":"⫤","Ď":"Ď","Д":"Д","∇":"∇","Δ":"Δ","𝔇":"𝔇","´":"´","˙":"˙","˝":"˝","`":"`","˜":"˜","⋄":"⋄","ⅆ":"ⅆ","𝔻":"𝔻","¨":"¨","⃜":"⃜","≐":"≐","∯":"∯","¨":"¨","⇓":"⇓","⇐":"⇐","⇔":"⇔","⫤":"⫤","⟸":"⟸","⟺":"⟺","⟹":"⟹","⇒":"⇒","⊨":"⊨","⇑":"⇑","⇕":"⇕","∥":"∥","↓":"↓","⤓":"⤓","⇵":"⇵","̑":"̑","⥐":"⥐","⥞":"⥞","↽":"↽","⥖":"⥖","⥟":"⥟","⇁":"⇁","⥗":"⥗","⊤":"⊤","↧":"↧","⇓":"⇓","𝒟":"𝒟","Đ":"Đ","Ŋ":"Ŋ","Ð":"Ð","Ð":"Ð","É":"É","É":"É","Ě":"Ě","Ê":"Ê","Ê":"Ê","Э":"Э","Ė":"Ė","𝔈":"𝔈","È":"È","È":"È","∈":"∈","Ē":"Ē","◻":"◻","▫":"▫","Ę":"Ę","𝔼":"𝔼","Ε":"Ε","⩵":"⩵","≂":"≂","⇌":"⇌","ℰ":"ℰ","⩳":"⩳","Η":"Η","Ë":"Ë","Ë":"Ë","∃":"∃","ⅇ":"ⅇ","Ф":"Ф","𝔉":"𝔉","◼":"◼","▪":"▪","𝔽":"𝔽","∀":"∀","ℱ":"ℱ","ℱ":"ℱ","Ѓ":"Ѓ",">":">",">":">","Γ":"Γ","Ϝ":"Ϝ","Ğ":"Ğ","Ģ":"Ģ","Ĝ":"Ĝ","Г":"Г","Ġ":"Ġ","𝔊":"𝔊","⋙":"⋙","𝔾":"𝔾","≥":"≥","⋛":"⋛","≧":"≧","⪢":"⪢","≷":"≷","⩾":"⩾","≳":"≳","𝒢":"𝒢","≫":"≫","Ъ":"Ъ","ˇ":"ˇ","^":"^","Ĥ":"Ĥ","ℌ":"ℌ","ℋ":"ℋ","ℍ":"ℍ","─":"─","ℋ":"ℋ","Ħ":"Ħ","≎":"≎","≏":"≏","Е":"Е","IJ":"IJ","Ё":"Ё","Í":"Í","Í":"Í","Î":"Î","Î":"Î","И":"И","İ":"İ","ℑ":"ℑ","Ì":"Ì","Ì":"Ì","ℑ":"ℑ","Ī":"Ī","ⅈ":"ⅈ","⇒":"⇒","∬":"∬","∫":"∫","⋂":"⋂","⁣":"⁣","⁢":"⁢","Į":"Į","𝕀":"𝕀","Ι":"Ι","ℐ":"ℐ","Ĩ":"Ĩ","І":"І","Ï":"Ï","Ï":"Ï","Ĵ":"Ĵ","Й":"Й","𝔍":"𝔍","𝕁":"𝕁","𝒥":"𝒥","Ј":"Ј","Є":"Є","Х":"Х","Ќ":"Ќ","Κ":"Κ","Ķ":"Ķ","К":"К","𝔎":"𝔎","𝕂":"𝕂","𝒦":"𝒦","Љ":"Љ","<":"<","<":"<","Ĺ":"Ĺ","Λ":"Λ","⟪":"⟪","ℒ":"ℒ","↞":"↞","Ľ":"Ľ","Ļ":"Ļ","Л":"Л","⟨":"⟨","←":"←","⇤":"⇤","⇆":"⇆","⌈":"⌈","⟦":"⟦","⥡":"⥡","⇃":"⇃","⥙":"⥙","⌊":"⌊","↔":"↔","⥎":"⥎","⊣":"⊣","↤":"↤","⥚":"⥚","⊲":"⊲","⧏":"⧏","⊴":"⊴","⥑":"⥑","⥠":"⥠","↿":"↿","⥘":"⥘","↼":"↼","⥒":"⥒","⇐":"⇐","⇔":"⇔","⋚":"⋚","≦":"≦","≶":"≶","⪡":"⪡","⩽":"⩽","≲":"≲","𝔏":"𝔏","⋘":"⋘","⇚":"⇚","Ŀ":"Ŀ","⟵":"⟵","⟷":"⟷","⟶":"⟶","⟸":"⟸","⟺":"⟺","⟹":"⟹","𝕃":"𝕃","↙":
"↙","↘":"↘","ℒ":"ℒ","↰":"↰","Ł":"Ł","≪":"≪","⤅":"⤅","М":"М"," ":" ","ℳ":"ℳ","𝔐":"𝔐","∓":"∓","𝕄":"𝕄","ℳ":"ℳ","Μ":"Μ","Њ":"Њ","Ń":"Ń","Ň":"Ň","Ņ":"Ņ","Н":"Н","​":"​","​":"​","​":"​","​":"​","≫":"≫","≪":"≪"," ":"\n","𝔑":"𝔑","⁠":"⁠"," ":" ","ℕ":"ℕ","⫬":"⫬","≢":"≢","≭":"≭","∦":"∦","∉":"∉","≠":"≠","≂̸":"≂̸","∄":"∄","≯":"≯","≱":"≱","≧̸":"≧̸","≫̸":"≫̸","≹":"≹","⩾̸":"⩾̸","≵":"≵","≎̸":"≎̸","≏̸":"≏̸","⋪":"⋪","⧏̸":"⧏̸","⋬":"⋬","≮":"≮","≰":"≰","≸":"≸","≪̸":"≪̸","⩽̸":"⩽̸","≴":"≴","⪢̸":"⪢̸","⪡̸":"⪡̸","⊀":"⊀","⪯̸":"⪯̸","⋠":"⋠","∌":"∌","⋫":"⋫","⧐̸":"⧐̸","⋭":"⋭","⊏̸":"⊏̸","⋢":"⋢","⊐̸":"⊐̸","⋣":"⋣","⊂⃒":"⊂⃒","⊈":"⊈","⊁":"⊁","⪰̸":"⪰̸","⋡":"⋡","≿̸":"≿̸","⊃⃒":"⊃⃒","⊉":"⊉","≁":"≁","≄":"≄","≇":"≇","≉":"≉","∤":"∤","𝒩":"𝒩","Ñ":"Ñ","Ñ":"Ñ","Ν":"Ν","Œ":"Œ","Ó":"Ó","Ó":"Ó","Ô":"Ô","Ô":"Ô","О":"О","Ő":"Ő","𝔒":"𝔒","Ò":"Ò","Ò":"Ò","Ō":"Ō","Ω":"Ω","Ο":"Ο","𝕆":"𝕆","“":"“","‘":"‘","⩔":"⩔","𝒪":"𝒪","Ø":"Ø","Ø":"Ø","Õ":"Õ","Õ":"Õ","⨷":"⨷","Ö":"Ö","Ö":"Ö","‾":"‾","⏞":"⏞","⎴":"⎴","⏜":"⏜","∂":"∂","П":"П","𝔓":"𝔓","Φ":"Φ","Π":"Π","±":"±","ℌ":"ℌ","ℙ":"ℙ","⪻":"⪻","≺":"≺","⪯":"⪯","≼":"≼","≾":"≾","″":"″","∏":"∏","∷":"∷","∝":"∝","𝒫":"𝒫","Ψ":"Ψ",""":'"',""":'"',"𝔔":"𝔔","ℚ":"ℚ","𝒬":"𝒬","⤐":"⤐","®":"®","®":"®","Ŕ":"Ŕ","⟫":"⟫","↠":"↠","⤖":"⤖","Ř":"Ř","Ŗ":"Ŗ","Р":"Р","ℜ":"ℜ","∋":"∋","⇋":"⇋","⥯":"⥯","ℜ":"ℜ","Ρ":"Ρ","⟩":"⟩","→":"→","⇥":"⇥","⇄":"⇄","⌉":"⌉","⟧":"⟧","⥝":"⥝","⇂":"⇂","⥕":"⥕","⌋":"⌋","⊢":"⊢","↦":"↦","⥛":"⥛","⊳":"⊳","⧐":"⧐","⊵":"⊵","⥏":"⥏","⥜":"⥜","↾":"↾","⥔":"⥔","⇀":"⇀","⥓":"⥓","⇒":"⇒","ℝ":"ℝ","⥰":"⥰","⇛":"⇛","ℛ":"ℛ","↱":"↱","⧴":"⧴","Щ":"Щ","Ш":"Ш","Ь":"Ь","Ś":"Ś","⪼":"⪼","Š":"Š","Ş":"Ş","Ŝ":"Ŝ","С":"С","𝔖":"𝔖","↓":"↓","←":"←","→":"→","↑":"↑","Σ":"Σ","∘":"∘","𝕊":"𝕊","√":"√","□":"□","⊓":"⊓","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊔":"⊔","𝒮":"𝒮","⋆":"⋆","⋐":"⋐","⋐":"⋐","⊆":"⊆","≻":"≻","⪰":"⪰","≽":"≽","≿":"≿","∋":"∋","∑":"∑","⋑":"⋑","⊃":"⊃","⊇":"⊇","⋑":"⋑","Þ":"Þ","Þ":"Þ","™":"™","Ћ":"Ћ","Ц":"Ц"," ":"\t","Τ":"Τ","Ť":"Ť","Ţ":"Ţ","Т":"Т","𝔗":"𝔗","∴":"∴","Θ":"Θ","  ":"  "," ":" ","∼":"∼","≃":"≃","≅":"≅","≈":"≈","𝕋":"𝕋","⃛":"⃛","𝒯":"𝒯","Ŧ":"Ŧ","Ú":"Ú","Ú":"Ú","↟":"↟","⥉":"⥉","Ў":"Ў","Ŭ":"Ŭ","Û":"Û","Û":"Û","У":"У","Ű":"Ű","𝔘":"𝔘","Ù":"Ù","Ù":"Ù","Ū":"Ū","_":"_","⏟":"⏟","⎵":"⎵","⏝":"⏝","⋃":"⋃","⊎":"⊎","Ų":"Ų","𝕌":"𝕌","↑":"↑","⤒":"⤒","⇅":"⇅","↕":"↕","⥮":"⥮","⊥":"⊥","↥":"↥","⇑":"⇑","⇕":"⇕","↖":"↖","↗":"↗","ϒ":"ϒ","Υ":"Υ","Ů":"Ů","𝒰":"𝒰","Ũ":"Ũ","Ü":"Ü","Ü":"Ü","⊫":"⊫","⫫":"⫫","В":"В","⊩":"⊩","⫦":"⫦","⋁":"⋁","‖":"‖","‖":"‖","∣":"∣","|":"|","❘":"❘","≀":"≀"," ":" 
","𝔙":"𝔙","𝕍":"𝕍","𝒱":"𝒱","⊪":"⊪","Ŵ":"Ŵ","⋀":"⋀","𝔚":"𝔚","𝕎":"𝕎","𝒲":"𝒲","𝔛":"𝔛","Ξ":"Ξ","𝕏":"𝕏","𝒳":"𝒳","Я":"Я","Ї":"Ї","Ю":"Ю","Ý":"Ý","Ý":"Ý","Ŷ":"Ŷ","Ы":"Ы","𝔜":"𝔜","𝕐":"𝕐","𝒴":"𝒴","Ÿ":"Ÿ","Ж":"Ж","Ź":"Ź","Ž":"Ž","З":"З","Ż":"Ż","​":"​","Ζ":"Ζ","ℨ":"ℨ","ℤ":"ℤ","𝒵":"𝒵","á":"á","á":"á","ă":"ă","∾":"∾","∾̳":"∾̳","∿":"∿","â":"â","â":"â","´":"´","´":"´","а":"а","æ":"æ","æ":"æ","⁡":"⁡","𝔞":"𝔞","à":"à","à":"à","ℵ":"ℵ","ℵ":"ℵ","α":"α","ā":"ā","⨿":"⨿","&":"&","&":"&","∧":"∧","⩕":"⩕","⩜":"⩜","⩘":"⩘","⩚":"⩚","∠":"∠","⦤":"⦤","∠":"∠","∡":"∡","⦨":"⦨","⦩":"⦩","⦪":"⦪","⦫":"⦫","⦬":"⦬","⦭":"⦭","⦮":"⦮","⦯":"⦯","∟":"∟","⊾":"⊾","⦝":"⦝","∢":"∢","Å":"Å","⍼":"⍼","ą":"ą","𝕒":"𝕒","≈":"≈","⩰":"⩰","⩯":"⩯","≊":"≊","≋":"≋","'":"'","≈":"≈","≊":"≊","å":"å","å":"å","𝒶":"𝒶","*":"*","≈":"≈","≍":"≍","ã":"ã","ã":"ã","ä":"ä","ä":"ä","∳":"∳","⨑":"⨑","⫭":"⫭","≌":"≌","϶":"϶","‵":"‵","∽":"∽","⋍":"⋍","⊽":"⊽","⌅":"⌅","⌅":"⌅","⎵":"⎵","⎶":"⎶","≌":"≌","б":"б","„":"„","∵":"∵","∵":"∵","⦰":"⦰","϶":"϶","ℬ":"ℬ","β":"β","ℶ":"ℶ","≬":"≬","𝔟":"𝔟","⋂":"⋂","◯":"◯","⋃":"⋃","⨀":"⨀","⨁":"⨁","⨂":"⨂","⨆":"⨆","★":"★","▽":"▽","△":"△","⨄":"⨄","⋁":"⋁","⋀":"⋀","⤍":"⤍","⧫":"⧫","▪":"▪","▴":"▴","▾":"▾","◂":"◂","▸":"▸","␣":"␣","▒":"▒","░":"░","▓":"▓","█":"█","=⃥":"=⃥","≡⃥":"≡⃥","⌐":"⌐","𝕓":"𝕓","⊥":"⊥","⊥":"⊥","⋈":"⋈","╗":"╗","╔":"╔","╖":"╖","╓":"╓","═":"═","╦":"╦","╩":"╩","╤":"╤","╧":"╧","╝":"╝","╚":"╚","╜":"╜","╙":"╙","║":"║","╬":"╬","╣":"╣","╠":"╠","╫":"╫","╢":"╢","╟":"╟","⧉":"⧉","╕":"╕","╒":"╒","┐":"┐","┌":"┌","─":"─","╥":"╥","╨":"╨","┬":"┬","┴":"┴","⊟":"⊟","⊞":"⊞","⊠":"⊠","╛":"╛","╘":"╘","┘":"┘","└":"└","│":"│","╪":"╪","╡":"╡","╞":"╞","┼":"┼","┤":"┤","├":"├","‵":"‵","˘":"˘","¦":"¦","¦":"¦","𝒷":"𝒷","⁏":"⁏","∽":"∽","⋍":"⋍","\":"\\","⧅":"⧅","⟈":"⟈","•":"•","•":"•","≎":"≎","⪮":"⪮","≏":"≏","≏":"≏","ć":"ć","∩":"∩","⩄":"⩄","⩉":"⩉","⩋":"⩋","⩇":"⩇","⩀":"⩀","∩︀":"∩︀","⁁":"⁁","ˇ":"ˇ","⩍":"⩍","č":"č","ç":"ç","ç":"ç","ĉ":"ĉ","⩌":"⩌","⩐":"⩐","ċ":"ċ","¸":"¸","¸":"¸","⦲":"⦲","¢":"¢","¢":"¢","·":"·","𝔠":"𝔠","ч":"ч","✓":"✓","✓":"✓","χ":"χ","○":"○","⧃":"⧃","ˆ":"ˆ","≗":"≗","↺":"↺","↻":"↻","®":"®","Ⓢ":"Ⓢ","⊛":"⊛","⊚":"⊚","⊝":"⊝","≗":"≗","⨐":"⨐","⫯":"⫯","⧂":"⧂","♣":"♣","♣":"♣",":":":","≔":"≔","≔":"≔",",":",","@":"@","∁":"∁","∘":"∘","∁":"∁","ℂ":"ℂ","≅":"≅","⩭":"⩭","∮":"∮","𝕔":"𝕔","∐":"∐","©":"©","©":"©","℗":"℗","↵":"↵","✗":"✗","𝒸":"𝒸","⫏":"⫏","⫑":"⫑","⫐":"⫐","⫒":"⫒","⋯":"⋯","⤸":"⤸","⤵":"⤵","⋞":"⋞","⋟":"⋟","↶":"↶","⤽":"⤽","∪":"∪","⩈":"⩈","⩆":"⩆","⩊":"⩊","⊍":"⊍","⩅":"⩅","∪︀":"∪︀","↷":"↷","⤼":"⤼","⋞":"⋞","⋟":"⋟","⋎":"⋎","⋏":"⋏","¤":"¤","¤":"¤","↶":"↶","↷":"↷","⋎":"⋎","⋏":"⋏","∲":"∲","∱":"∱","⌭":"⌭","⇓":"⇓","⥥":"⥥","†":"†","ℸ":"ℸ","↓":"↓","‐":"‐","⊣":"⊣","⤏":"⤏","˝":"˝","ď":"ď","д":"д","ⅆ":"ⅆ","‡":"‡","⇊":"⇊","⩷":"⩷","°":"°","°":"°","δ":"δ","⦱":"⦱","⥿":"⥿","𝔡":"𝔡","⇃":"⇃","⇂":"⇂","⋄":"⋄","⋄":"⋄","♦":"♦","♦":"♦","¨":"¨","ϝ":"ϝ","⋲":"⋲","÷":"÷","÷":"÷","÷":"÷","⋇":"⋇","⋇":"⋇","ђ":"ђ","⌞":"⌞","⌍":"⌍","$":"$","𝕕":"𝕕","˙":"˙","≐":"≐","≑":"≑","∸":"∸","∔":"∔","⊡":"⊡","⌆":"⌆","↓":"↓","⇊":"⇊","⇃":"⇃","⇂":"⇂","⤐":"⤐","⌟":"⌟","⌌":"⌌","𝒹":"𝒹","ѕ":"ѕ","⧶":"⧶","đ":"đ","⋱":"⋱","▿":"▿","▾":"▾","⇵":"⇵","⥯":"⥯","⦦":"⦦","џ":"џ","⟿":"⟿","⩷":"⩷","≑":"≑","é":"é","é":"é","⩮":"⩮","ě":"ě","≖":"≖","ê":"ê","ê":"ê","≕":"≕","э":"э","ė":"ė","ⅇ":"ⅇ","≒":"≒","𝔢":"𝔢","⪚":"⪚","è":"è","è":"è","⪖":"⪖","⪘":"⪘","⪙":"⪙","⏧":"⏧","ℓ":"ℓ","⪕":"⪕","⪗":"⪗","ē":"ē","∅":"∅","∅":"∅","∅":"∅"," ":" "," ":" "," ":" ","ŋ":"ŋ"," ":" 
","ę":"ę","𝕖":"𝕖","⋕":"⋕","⧣":"⧣","⩱":"⩱","ε":"ε","ε":"ε","ϵ":"ϵ","≖":"≖","≕":"≕","≂":"≂","⪖":"⪖","⪕":"⪕","=":"=","≟":"≟","≡":"≡","⩸":"⩸","⧥":"⧥","≓":"≓","⥱":"⥱","ℯ":"ℯ","≐":"≐","≂":"≂","η":"η","ð":"ð","ð":"ð","ë":"ë","ë":"ë","€":"€","!":"!","∃":"∃","ℰ":"ℰ","ⅇ":"ⅇ","≒":"≒","ф":"ф","♀":"♀","ffi":"ffi","ff":"ff","ffl":"ffl","𝔣":"𝔣","fi":"fi","fj":"fj","♭":"♭","fl":"fl","▱":"▱","ƒ":"ƒ","𝕗":"𝕗","∀":"∀","⋔":"⋔","⫙":"⫙","⨍":"⨍","½":"½","½":"½","⅓":"⅓","¼":"¼","¼":"¼","⅕":"⅕","⅙":"⅙","⅛":"⅛","⅔":"⅔","⅖":"⅖","¾":"¾","¾":"¾","⅗":"⅗","⅜":"⅜","⅘":"⅘","⅚":"⅚","⅝":"⅝","⅞":"⅞","⁄":"⁄","⌢":"⌢","𝒻":"𝒻","≧":"≧","⪌":"⪌","ǵ":"ǵ","γ":"γ","ϝ":"ϝ","⪆":"⪆","ğ":"ğ","ĝ":"ĝ","г":"г","ġ":"ġ","≥":"≥","⋛":"⋛","≥":"≥","≧":"≧","⩾":"⩾","⩾":"⩾","⪩":"⪩","⪀":"⪀","⪂":"⪂","⪄":"⪄","⋛︀":"⋛︀","⪔":"⪔","𝔤":"𝔤","≫":"≫","⋙":"⋙","ℷ":"ℷ","ѓ":"ѓ","≷":"≷","⪒":"⪒","⪥":"⪥","⪤":"⪤","≩":"≩","⪊":"⪊","⪊":"⪊","⪈":"⪈","⪈":"⪈","≩":"≩","⋧":"⋧","𝕘":"𝕘","`":"`","ℊ":"ℊ","≳":"≳","⪎":"⪎","⪐":"⪐",">":">",">":">","⪧":"⪧","⩺":"⩺","⋗":"⋗","⦕":"⦕","⩼":"⩼","⪆":"⪆","⥸":"⥸","⋗":"⋗","⋛":"⋛","⪌":"⪌","≷":"≷","≳":"≳","≩︀":"≩︀","≩︀":"≩︀","⇔":"⇔"," ":" ","½":"½","ℋ":"ℋ","ъ":"ъ","↔":"↔","⥈":"⥈","↭":"↭","ℏ":"ℏ","ĥ":"ĥ","♥":"♥","♥":"♥","…":"…","⊹":"⊹","𝔥":"𝔥","⤥":"⤥","⤦":"⤦","⇿":"⇿","∻":"∻","↩":"↩","↪":"↪","𝕙":"𝕙","―":"―","𝒽":"𝒽","ℏ":"ℏ","ħ":"ħ","⁃":"⁃","‐":"‐","í":"í","í":"í","⁣":"⁣","î":"î","î":"î","и":"и","е":"е","¡":"¡","¡":"¡","⇔":"⇔","𝔦":"𝔦","ì":"ì","ì":"ì","ⅈ":"ⅈ","⨌":"⨌","∭":"∭","⧜":"⧜","℩":"℩","ij":"ij","ī":"ī","ℑ":"ℑ","ℐ":"ℐ","ℑ":"ℑ","ı":"ı","⊷":"⊷","Ƶ":"Ƶ","∈":"∈","℅":"℅","∞":"∞","⧝":"⧝","ı":"ı","∫":"∫","⊺":"⊺","ℤ":"ℤ","⊺":"⊺","⨗":"⨗","⨼":"⨼","ё":"ё","į":"į","𝕚":"𝕚","ι":"ι","⨼":"⨼","¿":"¿","¿":"¿","𝒾":"𝒾","∈":"∈","⋹":"⋹","⋵":"⋵","⋴":"⋴","⋳":"⋳","∈":"∈","⁢":"⁢","ĩ":"ĩ","і":"і","ï":"ï","ï":"ï","ĵ":"ĵ","й":"й","𝔧":"𝔧","ȷ":"ȷ","𝕛":"𝕛","𝒿":"𝒿","ј":"ј","є":"є","κ":"κ","ϰ":"ϰ","ķ":"ķ","к":"к","𝔨":"𝔨","ĸ":"ĸ","х":"х","ќ":"ќ","𝕜":"𝕜","𝓀":"𝓀","⇚":"⇚","⇐":"⇐","⤛":"⤛","⤎":"⤎","≦":"≦","⪋":"⪋","⥢":"⥢","ĺ":"ĺ","⦴":"⦴","ℒ":"ℒ","λ":"λ","⟨":"⟨","⦑":"⦑","⟨":"⟨","⪅":"⪅","«":"«","«":"«","←":"←","⇤":"⇤","⤟":"⤟","⤝":"⤝","↩":"↩","↫":"↫","⤹":"⤹","⥳":"⥳","↢":"↢","⪫":"⪫","⤙":"⤙","⪭":"⪭","⪭︀":"⪭︀","⤌":"⤌","❲":"❲","{":"{","[":"[","⦋":"⦋","⦏":"⦏","⦍":"⦍","ľ":"ľ","ļ":"ļ","⌈":"⌈","{":"{","л":"л","⤶":"⤶","“":"“","„":"„","⥧":"⥧","⥋":"⥋","↲":"↲","≤":"≤","←":"←","↢":"↢","↽":"↽","↼":"↼","⇇":"⇇","↔":"↔","⇆":"⇆","⇋":"⇋","↭":"↭","⋋":"⋋","⋚":"⋚","≤":"≤","≦":"≦","⩽":"⩽","⩽":"⩽","⪨":"⪨","⩿":"⩿","⪁":"⪁","⪃":"⪃","⋚︀":"⋚︀","⪓":"⪓","⪅":"⪅","⋖":"⋖","⋚":"⋚","⪋":"⪋","≶":"≶","≲":"≲","⥼":"⥼","⌊":"⌊","𝔩":"𝔩","≶":"≶","⪑":"⪑","↽":"↽","↼":"↼","⥪":"⥪","▄":"▄","љ":"љ","≪":"≪","⇇":"⇇","⌞":"⌞","⥫":"⥫","◺":"◺","ŀ":"ŀ","⎰":"⎰","⎰":"⎰","≨":"≨","⪉":"⪉","⪉":"⪉","⪇":"⪇","⪇":"⪇","≨":"≨","⋦":"⋦","⟬":"⟬","⇽":"⇽","⟦":"⟦","⟵":"⟵","⟷":"⟷","⟼":"⟼","⟶":"⟶","↫":"↫","↬":"↬","⦅":"⦅","𝕝":"𝕝","⨭":"⨭","⨴":"⨴","∗":"∗","_":"_","◊":"◊","◊":"◊","⧫":"⧫","(":"(","⦓":"⦓","⇆":"⇆","⌟":"⌟","⇋":"⇋","⥭":"⥭","‎":"‎","⊿":"⊿","‹":"‹","𝓁":"𝓁","↰":"↰","≲":"≲","⪍":"⪍","⪏":"⪏","[":"[","‘":"‘","‚":"‚","ł":"ł","<":"<","<":"<","⪦":"⪦","⩹":"⩹","⋖":"⋖","⋋":"⋋","⋉":"⋉","⥶":"⥶","⩻":"⩻","⦖":"⦖","◃":"◃","⊴":"⊴","◂":"◂","⥊":"⥊","⥦":"⥦","≨︀":"≨︀","≨︀":"≨︀","∺":"∺","¯":"¯","¯":"¯","♂":"♂","✠":"✠","✠":"✠","↦":"↦","↦":"↦","↧":"↧","↤":"↤","↥":"↥","▮":"▮","⨩":"⨩","м":"м","—":"—","∡":"∡","𝔪":"𝔪","℧":"℧","µ":"µ","µ":"µ","∣":"∣","*":"*","⫰":"⫰","·":"·","·":"·","−":"−","⊟":"⊟","∸":"∸","⨪":"⨪","⫛":"⫛","…":"…","∓":"∓","⊧":"⊧","𝕞":"𝕞","∓":"∓","𝓂":"𝓂","∾":"∾","μ":"μ","⊸":"⊸","⊸":"⊸","⋙̸":"⋙̸","≫⃒":"≫⃒","≫̸":"≫̸","⇍":"⇍","⇎":"⇎","⋘̸":"⋘̸","≪⃒":"≪⃒","≪̸":"≪̸","⇏":"⇏","⊯":"⊯","⊮":"
⊮","∇":"∇","ń":"ń","∠⃒":"∠⃒","≉":"≉","⩰̸":"⩰̸","≋̸":"≋̸","ʼn":"ʼn","≉":"≉","♮":"♮","♮":"♮","ℕ":"ℕ"," ":" "," ":" ","≎̸":"≎̸","≏̸":"≏̸","⩃":"⩃","ň":"ň","ņ":"ņ","≇":"≇","⩭̸":"⩭̸","⩂":"⩂","н":"н","–":"–","≠":"≠","⇗":"⇗","⤤":"⤤","↗":"↗","↗":"↗","≐̸":"≐̸","≢":"≢","⤨":"⤨","≂̸":"≂̸","∄":"∄","∄":"∄","𝔫":"𝔫","≧̸":"≧̸","≱":"≱","≱":"≱","≧̸":"≧̸","⩾̸":"⩾̸","⩾̸":"⩾̸","≵":"≵","≯":"≯","≯":"≯","⇎":"⇎","↮":"↮","⫲":"⫲","∋":"∋","⋼":"⋼","⋺":"⋺","∋":"∋","њ":"њ","⇍":"⇍","≦̸":"≦̸","↚":"↚","‥":"‥","≰":"≰","↚":"↚","↮":"↮","≰":"≰","≦̸":"≦̸","⩽̸":"⩽̸","⩽̸":"⩽̸","≮":"≮","≴":"≴","≮":"≮","⋪":"⋪","⋬":"⋬","∤":"∤","𝕟":"𝕟","¬":"¬","¬":"¬","∉":"∉","⋹̸":"⋹̸","⋵̸":"⋵̸","∉":"∉","⋷":"⋷","⋶":"⋶","∌":"∌","∌":"∌","⋾":"⋾","⋽":"⋽","∦":"∦","∦":"∦","⫽⃥":"⫽⃥","∂̸":"∂̸","⨔":"⨔","⊀":"⊀","⋠":"⋠","⪯̸":"⪯̸","⊀":"⊀","⪯̸":"⪯̸","⇏":"⇏","↛":"↛","⤳̸":"⤳̸","↝̸":"↝̸","↛":"↛","⋫":"⋫","⋭":"⋭","⊁":"⊁","⋡":"⋡","⪰̸":"⪰̸","𝓃":"𝓃","∤":"∤","∦":"∦","≁":"≁","≄":"≄","≄":"≄","∤":"∤","∦":"∦","⋢":"⋢","⋣":"⋣","⊄":"⊄","⫅̸":"⫅̸","⊈":"⊈","⊂⃒":"⊂⃒","⊈":"⊈","⫅̸":"⫅̸","⊁":"⊁","⪰̸":"⪰̸","⊅":"⊅","⫆̸":"⫆̸","⊉":"⊉","⊃⃒":"⊃⃒","⊉":"⊉","⫆̸":"⫆̸","≹":"≹","ñ":"ñ","ñ":"ñ","≸":"≸","⋪":"⋪","⋬":"⋬","⋫":"⋫","⋭":"⋭","ν":"ν","#":"#","№":"№"," ":" ","⊭":"⊭","⤄":"⤄","≍⃒":"≍⃒","⊬":"⊬","≥⃒":"≥⃒",">⃒":">⃒","⧞":"⧞","⤂":"⤂","≤⃒":"≤⃒","<⃒":"<⃒","⊴⃒":"⊴⃒","⤃":"⤃","⊵⃒":"⊵⃒","∼⃒":"∼⃒","⇖":"⇖","⤣":"⤣","↖":"↖","↖":"↖","⤧":"⤧","Ⓢ":"Ⓢ","ó":"ó","ó":"ó","⊛":"⊛","⊚":"⊚","ô":"ô","ô":"ô","о":"о","⊝":"⊝","ő":"ő","⨸":"⨸","⊙":"⊙","⦼":"⦼","œ":"œ","⦿":"⦿","𝔬":"𝔬","˛":"˛","ò":"ò","ò":"ò","⧁":"⧁","⦵":"⦵","Ω":"Ω","∮":"∮","↺":"↺","⦾":"⦾","⦻":"⦻","‾":"‾","⧀":"⧀","ō":"ō","ω":"ω","ο":"ο","⦶":"⦶","⊖":"⊖","𝕠":"𝕠","⦷":"⦷","⦹":"⦹","⊕":"⊕","∨":"∨","↻":"↻","⩝":"⩝","ℴ":"ℴ","ℴ":"ℴ","ª":"ª","ª":"ª","º":"º","º":"º","⊶":"⊶","⩖":"⩖","⩗":"⩗","⩛":"⩛","ℴ":"ℴ","ø":"ø","ø":"ø","⊘":"⊘","õ":"õ","õ":"õ","⊗":"⊗","⨶":"⨶","ö":"ö","ö":"ö","⌽":"⌽","∥":"∥","¶":"¶","¶":"¶","∥":"∥","⫳":"⫳","⫽":"⫽","∂":"∂","п":"п","%":"%",".":".","‰":"‰","⊥":"⊥","‱":"‱","𝔭":"𝔭","φ":"φ","ϕ":"ϕ","ℳ":"ℳ","☎":"☎","π":"π","⋔":"⋔","ϖ":"ϖ","ℏ":"ℏ","ℎ":"ℎ","ℏ":"ℏ","+":"+","⨣":"⨣","⊞":"⊞","⨢":"⨢","∔":"∔","⨥":"⨥","⩲":"⩲","±":"±","±":"±","⨦":"⨦","⨧":"⨧","±":"±","⨕":"⨕","𝕡":"𝕡","£":"£","£":"£","≺":"≺","⪳":"⪳","⪷":"⪷","≼":"≼","⪯":"⪯","≺":"≺","⪷":"⪷","≼":"≼","⪯":"⪯","⪹":"⪹","⪵":"⪵","⋨":"⋨","≾":"≾","′":"′","ℙ":"ℙ","⪵":"⪵","⪹":"⪹","⋨":"⋨","∏":"∏","⌮":"⌮","⌒":"⌒","⌓":"⌓","∝":"∝","∝":"∝","≾":"≾","⊰":"⊰","𝓅":"𝓅","ψ":"ψ"," ":" 
","𝔮":"𝔮","⨌":"⨌","𝕢":"𝕢","⁗":"⁗","𝓆":"𝓆","ℍ":"ℍ","⨖":"⨖","?":"?","≟":"≟",""":'"',""":'"',"⇛":"⇛","⇒":"⇒","⤜":"⤜","⤏":"⤏","⥤":"⥤","∽̱":"∽̱","ŕ":"ŕ","√":"√","⦳":"⦳","⟩":"⟩","⦒":"⦒","⦥":"⦥","⟩":"⟩","»":"»","»":"»","→":"→","⥵":"⥵","⇥":"⇥","⤠":"⤠","⤳":"⤳","⤞":"⤞","↪":"↪","↬":"↬","⥅":"⥅","⥴":"⥴","↣":"↣","↝":"↝","⤚":"⤚","∶":"∶","ℚ":"ℚ","⤍":"⤍","❳":"❳","}":"}","]":"]","⦌":"⦌","⦎":"⦎","⦐":"⦐","ř":"ř","ŗ":"ŗ","⌉":"⌉","}":"}","р":"р","⤷":"⤷","⥩":"⥩","”":"”","”":"”","↳":"↳","ℜ":"ℜ","ℛ":"ℛ","ℜ":"ℜ","ℝ":"ℝ","▭":"▭","®":"®","®":"®","⥽":"⥽","⌋":"⌋","𝔯":"𝔯","⇁":"⇁","⇀":"⇀","⥬":"⥬","ρ":"ρ","ϱ":"ϱ","→":"→","↣":"↣","⇁":"⇁","⇀":"⇀","⇄":"⇄","⇌":"⇌","⇉":"⇉","↝":"↝","⋌":"⋌","˚":"˚","≓":"≓","⇄":"⇄","⇌":"⇌","‏":"‏","⎱":"⎱","⎱":"⎱","⫮":"⫮","⟭":"⟭","⇾":"⇾","⟧":"⟧","⦆":"⦆","𝕣":"𝕣","⨮":"⨮","⨵":"⨵",")":")","⦔":"⦔","⨒":"⨒","⇉":"⇉","›":"›","𝓇":"𝓇","↱":"↱","]":"]","’":"’","’":"’","⋌":"⋌","⋊":"⋊","▹":"▹","⊵":"⊵","▸":"▸","⧎":"⧎","⥨":"⥨","℞":"℞","ś":"ś","‚":"‚","≻":"≻","⪴":"⪴","⪸":"⪸","š":"š","≽":"≽","⪰":"⪰","ş":"ş","ŝ":"ŝ","⪶":"⪶","⪺":"⪺","⋩":"⋩","⨓":"⨓","≿":"≿","с":"с","⋅":"⋅","⊡":"⊡","⩦":"⩦","⇘":"⇘","⤥":"⤥","↘":"↘","↘":"↘","§":"§","§":"§",";":";","⤩":"⤩","∖":"∖","∖":"∖","✶":"✶","𝔰":"𝔰","⌢":"⌢","♯":"♯","щ":"щ","ш":"ш","∣":"∣","∥":"∥","­":"­","­":"­","σ":"σ","ς":"ς","ς":"ς","∼":"∼","⩪":"⩪","≃":"≃","≃":"≃","⪞":"⪞","⪠":"⪠","⪝":"⪝","⪟":"⪟","≆":"≆","⨤":"⨤","⥲":"⥲","←":"←","∖":"∖","⨳":"⨳","⧤":"⧤","∣":"∣","⌣":"⌣","⪪":"⪪","⪬":"⪬","⪬︀":"⪬︀","ь":"ь","/":"/","⧄":"⧄","⌿":"⌿","𝕤":"𝕤","♠":"♠","♠":"♠","∥":"∥","⊓":"⊓","⊓︀":"⊓︀","⊔":"⊔","⊔︀":"⊔︀","⊏":"⊏","⊑":"⊑","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊐":"⊐","⊒":"⊒","□":"□","□":"□","▪":"▪","▪":"▪","→":"→","𝓈":"𝓈","∖":"∖","⌣":"⌣","⋆":"⋆","☆":"☆","★":"★","ϵ":"ϵ","ϕ":"ϕ","¯":"¯","⊂":"⊂","⫅":"⫅","⪽":"⪽","⊆":"⊆","⫃":"⫃","⫁":"⫁","⫋":"⫋","⊊":"⊊","⪿":"⪿","⥹":"⥹","⊂":"⊂","⊆":"⊆","⫅":"⫅","⊊":"⊊","⫋":"⫋","⫇":"⫇","⫕":"⫕","⫓":"⫓","≻":"≻","⪸":"⪸","≽":"≽","⪰":"⪰","⪺":"⪺","⪶":"⪶","⋩":"⋩","≿":"≿","∑":"∑","♪":"♪","¹":"¹","¹":"¹","²":"²","²":"²","³":"³","³":"³","⊃":"⊃","⫆":"⫆","⪾":"⪾","⫘":"⫘","⊇":"⊇","⫄":"⫄","⟉":"⟉","⫗":"⫗","⥻":"⥻","⫂":"⫂","⫌":"⫌","⊋":"⊋","⫀":"⫀","⊃":"⊃","⊇":"⊇","⫆":"⫆","⊋":"⊋","⫌":"⫌","⫈":"⫈","⫔":"⫔","⫖":"⫖","⇙":"⇙","⤦":"⤦","↙":"↙","↙":"↙","⤪":"⤪","ß":"ß","ß":"ß","⌖":"⌖","τ":"τ","⎴":"⎴","ť":"ť","ţ":"ţ","т":"т","⃛":"⃛","⌕":"⌕","𝔱":"𝔱","∴":"∴","∴":"∴","θ":"θ","ϑ":"ϑ","ϑ":"ϑ","≈":"≈","∼":"∼"," ":" 
","≈":"≈","∼":"∼","þ":"þ","þ":"þ","˜":"˜","×":"×","×":"×","⊠":"⊠","⨱":"⨱","⨰":"⨰","∭":"∭","⤨":"⤨","⊤":"⊤","⌶":"⌶","⫱":"⫱","𝕥":"𝕥","⫚":"⫚","⤩":"⤩","‴":"‴","™":"™","▵":"▵","▿":"▿","◃":"◃","⊴":"⊴","≜":"≜","▹":"▹","⊵":"⊵","◬":"◬","≜":"≜","⨺":"⨺","⨹":"⨹","⧍":"⧍","⨻":"⨻","⏢":"⏢","𝓉":"𝓉","ц":"ц","ћ":"ћ","ŧ":"ŧ","≬":"≬","↞":"↞","↠":"↠","⇑":"⇑","⥣":"⥣","ú":"ú","ú":"ú","↑":"↑","ў":"ў","ŭ":"ŭ","û":"û","û":"û","у":"у","⇅":"⇅","ű":"ű","⥮":"⥮","⥾":"⥾","𝔲":"𝔲","ù":"ù","ù":"ù","↿":"↿","↾":"↾","▀":"▀","⌜":"⌜","⌜":"⌜","⌏":"⌏","◸":"◸","ū":"ū","¨":"¨","¨":"¨","ų":"ų","𝕦":"𝕦","↑":"↑","↕":"↕","↿":"↿","↾":"↾","⊎":"⊎","υ":"υ","ϒ":"ϒ","υ":"υ","⇈":"⇈","⌝":"⌝","⌝":"⌝","⌎":"⌎","ů":"ů","◹":"◹","𝓊":"𝓊","⋰":"⋰","ũ":"ũ","▵":"▵","▴":"▴","⇈":"⇈","ü":"ü","ü":"ü","⦧":"⦧","⇕":"⇕","⫨":"⫨","⫩":"⫩","⊨":"⊨","⦜":"⦜","ϵ":"ϵ","ϰ":"ϰ","∅":"∅","ϕ":"ϕ","ϖ":"ϖ","∝":"∝","↕":"↕","ϱ":"ϱ","ς":"ς","⊊︀":"⊊︀","⫋︀":"⫋︀","⊋︀":"⊋︀","⫌︀":"⫌︀","ϑ":"ϑ","⊲":"⊲","⊳":"⊳","в":"в","⊢":"⊢","∨":"∨","⊻":"⊻","≚":"≚","⋮":"⋮","|":"|","|":"|","𝔳":"𝔳","⊲":"⊲","⊂⃒":"⊂⃒","⊃⃒":"⊃⃒","𝕧":"𝕧","∝":"∝","⊳":"⊳","𝓋":"𝓋","⫋︀":"⫋︀","⊊︀":"⊊︀","⫌︀":"⫌︀","⊋︀":"⊋︀","⦚":"⦚","ŵ":"ŵ","⩟":"⩟","∧":"∧","≙":"≙","℘":"℘","𝔴":"𝔴","𝕨":"𝕨","℘":"℘","≀":"≀","≀":"≀","𝓌":"𝓌","⋂":"⋂","◯":"◯","⋃":"⋃","▽":"▽","𝔵":"𝔵","⟺":"⟺","⟷":"⟷","ξ":"ξ","⟸":"⟸","⟵":"⟵","⟼":"⟼","⋻":"⋻","⨀":"⨀","𝕩":"𝕩","⨁":"⨁","⨂":"⨂","⟹":"⟹","⟶":"⟶","𝓍":"𝓍","⨆":"⨆","⨄":"⨄","△":"△","⋁":"⋁","⋀":"⋀","ý":"ý","ý":"ý","я":"я","ŷ":"ŷ","ы":"ы","¥":"¥","¥":"¥","𝔶":"𝔶","ї":"ї","𝕪":"𝕪","𝓎":"𝓎","ю":"ю","ÿ":"ÿ","ÿ":"ÿ","ź":"ź","ž":"ž","з":"з","ż":"ż","ℨ":"ℨ","ζ":"ζ","𝔷":"𝔷","ж":"ж","⇝":"⇝","𝕫":"𝕫","𝓏":"𝓏","‍":"‍","‌":"‌"},characters:{"Æ":"Æ","&":"&","Á":"Á","Ă":"Ă","Â":"Â","А":"А","𝔄":"𝔄","À":"À","Α":"Α","Ā":"Ā","⩓":"⩓","Ą":"Ą","𝔸":"𝔸","⁡":"⁡","Å":"Å","𝒜":"𝒜","≔":"≔","Ã":"Ã","Ä":"Ä","∖":"∖","⫧":"⫧","⌆":"⌆","Б":"Б","∵":"∵","ℬ":"ℬ","Β":"Β","𝔅":"𝔅","𝔹":"𝔹","˘":"˘","≎":"≎","Ч":"Ч","©":"©","Ć":"Ć","⋒":"⋒","ⅅ":"ⅅ","ℭ":"ℭ","Č":"Č","Ç":"Ç","Ĉ":"Ĉ","∰":"∰","Ċ":"Ċ","¸":"¸","·":"·","Χ":"Χ","⊙":"⊙","⊖":"⊖","⊕":"⊕","⊗":"⊗","∲":"∲","”":"”","’":"’","∷":"∷","⩴":"⩴","≡":"≡","∯":"∯","∮":"∮","ℂ":"ℂ","∐":"∐","∳":"∳","⨯":"⨯","𝒞":"𝒞","⋓":"⋓","≍":"≍","⤑":"⤑","Ђ":"Ђ","Ѕ":"Ѕ","Џ":"Џ","‡":"‡","↡":"↡","⫤":"⫤","Ď":"Ď","Д":"Д","∇":"∇","Δ":"Δ","𝔇":"𝔇","´":"´","˙":"˙","˝":"˝","`":"`","˜":"˜","⋄":"⋄","ⅆ":"ⅆ","𝔻":"𝔻","¨":"¨","⃜":"⃜","≐":"≐","⇓":"⇓","⇐":"⇐","⇔":"⇔","⟸":"⟸","⟺":"⟺","⟹":"⟹","⇒":"⇒","⊨":"⊨","⇑":"⇑","⇕":"⇕","∥":"∥","↓":"↓","⤓":"⤓","⇵":"⇵","̑":"̑","⥐":"⥐","⥞":"⥞","↽":"↽","⥖":"⥖","⥟":"⥟","⇁":"⇁","⥗":"⥗","⊤":"⊤","↧":"↧","𝒟":"𝒟","Đ":"Đ","Ŋ":"Ŋ","Ð":"Ð","É":"É","Ě":"Ě","Ê":"Ê","Э":"Э","Ė":"Ė","𝔈":"𝔈","È":"È","∈":"∈","Ē":"Ē","◻":"◻","▫":"▫","Ę":"Ę","𝔼":"𝔼","Ε":"Ε","⩵":"⩵","≂":"≂","⇌":"⇌","ℰ":"ℰ","⩳":"⩳","Η":"Η","Ë":"Ë","∃":"∃","ⅇ":"ⅇ","Ф":"Ф","𝔉":"𝔉","◼":"◼","▪":"▪","𝔽":"𝔽","∀":"∀","ℱ":"ℱ","Ѓ":"Ѓ",">":">","Γ":"Γ","Ϝ":"Ϝ","Ğ":"Ğ","Ģ":"Ģ","Ĝ":"Ĝ","Г":"Г","Ġ":"Ġ","𝔊":"𝔊","⋙":"⋙","𝔾":"𝔾","≥":"≥","⋛":"⋛","≧":"≧","⪢":"⪢","≷":"≷","⩾":"⩾","≳":"≳","𝒢":"𝒢","≫":"≫","Ъ":"Ъ","ˇ":"ˇ","^":"^","Ĥ":"Ĥ","ℌ":"ℌ","ℋ":"ℋ","ℍ":"ℍ","─":"─","Ħ":"Ħ","≏":"≏","Е":"Е","IJ":"IJ","Ё":"Ё","Í":"Í","Î":"Î","И":"И","İ":"İ","ℑ":"ℑ","Ì":"Ì","Ī":"Ī","ⅈ":"ⅈ","∬":"∬","∫":"∫","⋂":"⋂","⁣":"⁣","⁢":"⁢","Į":"Į","𝕀":"𝕀","Ι":"Ι","ℐ":"ℐ","Ĩ":"Ĩ","І":"І","Ï":"Ï","Ĵ":"Ĵ","Й":"Й","𝔍":"𝔍","𝕁":"𝕁","𝒥":"𝒥","Ј":"Ј","Є":"Є","Х":"Х","Ќ":"Ќ","Κ":"Κ","Ķ":"Ķ","К":"К","𝔎":"𝔎","𝕂":"𝕂","𝒦":"𝒦","Љ":"Љ","<":"<","Ĺ":"Ĺ","Λ":"Λ","⟪":"⟪","ℒ":"ℒ","↞":"↞","Ľ":"Ľ","Ļ":"Ļ","Л":"Л","⟨":"⟨","←":"←","⇤":"⇤","⇆":"⇆","⌈":"⌈","⟦":"⟦","⥡":"⥡","⇃":"⇃","⥙":"⥙","⌊":"⌊","↔":"↔","⥎":"⥎","⊣":"⊣","↤":"↤","⥚":"⥚","⊲":"⊲","⧏":"⧏","⊴":"⊴","⥑":"⥑","⥠":"⥠
","↿":"↿","⥘":"⥘","↼":"↼","⥒":"⥒","⋚":"⋚","≦":"≦","≶":"≶","⪡":"⪡","⩽":"⩽","≲":"≲","𝔏":"𝔏","⋘":"⋘","⇚":"⇚","Ŀ":"Ŀ","⟵":"⟵","⟷":"⟷","⟶":"⟶","𝕃":"𝕃","↙":"↙","↘":"↘","↰":"↰","Ł":"Ł","≪":"≪","⤅":"⤅","М":"М"," ":" ","ℳ":"ℳ","𝔐":"𝔐","∓":"∓","𝕄":"𝕄","Μ":"Μ","Њ":"Њ","Ń":"Ń","Ň":"Ň","Ņ":"Ņ","Н":"Н","​":"​","\n":" ","𝔑":"𝔑","⁠":"⁠"," ":" ","ℕ":"ℕ","⫬":"⫬","≢":"≢","≭":"≭","∦":"∦","∉":"∉","≠":"≠","≂̸":"≂̸","∄":"∄","≯":"≯","≱":"≱","≧̸":"≧̸","≫̸":"≫̸","≹":"≹","⩾̸":"⩾̸","≵":"≵","≎̸":"≎̸","≏̸":"≏̸","⋪":"⋪","⧏̸":"⧏̸","⋬":"⋬","≮":"≮","≰":"≰","≸":"≸","≪̸":"≪̸","⩽̸":"⩽̸","≴":"≴","⪢̸":"⪢̸","⪡̸":"⪡̸","⊀":"⊀","⪯̸":"⪯̸","⋠":"⋠","∌":"∌","⋫":"⋫","⧐̸":"⧐̸","⋭":"⋭","⊏̸":"⊏̸","⋢":"⋢","⊐̸":"⊐̸","⋣":"⋣","⊂⃒":"⊂⃒","⊈":"⊈","⊁":"⊁","⪰̸":"⪰̸","⋡":"⋡","≿̸":"≿̸","⊃⃒":"⊃⃒","⊉":"⊉","≁":"≁","≄":"≄","≇":"≇","≉":"≉","∤":"∤","𝒩":"𝒩","Ñ":"Ñ","Ν":"Ν","Œ":"Œ","Ó":"Ó","Ô":"Ô","О":"О","Ő":"Ő","𝔒":"𝔒","Ò":"Ò","Ō":"Ō","Ω":"Ω","Ο":"Ο","𝕆":"𝕆","“":"“","‘":"‘","⩔":"⩔","𝒪":"𝒪","Ø":"Ø","Õ":"Õ","⨷":"⨷","Ö":"Ö","‾":"‾","⏞":"⏞","⎴":"⎴","⏜":"⏜","∂":"∂","П":"П","𝔓":"𝔓","Φ":"Φ","Π":"Π","±":"±","ℙ":"ℙ","⪻":"⪻","≺":"≺","⪯":"⪯","≼":"≼","≾":"≾","″":"″","∏":"∏","∝":"∝","𝒫":"𝒫","Ψ":"Ψ",'"':""","𝔔":"𝔔","ℚ":"ℚ","𝒬":"𝒬","⤐":"⤐","®":"®","Ŕ":"Ŕ","⟫":"⟫","↠":"↠","⤖":"⤖","Ř":"Ř","Ŗ":"Ŗ","Р":"Р","ℜ":"ℜ","∋":"∋","⇋":"⇋","⥯":"⥯","Ρ":"Ρ","⟩":"⟩","→":"→","⇥":"⇥","⇄":"⇄","⌉":"⌉","⟧":"⟧","⥝":"⥝","⇂":"⇂","⥕":"⥕","⌋":"⌋","⊢":"⊢","↦":"↦","⥛":"⥛","⊳":"⊳","⧐":"⧐","⊵":"⊵","⥏":"⥏","⥜":"⥜","↾":"↾","⥔":"⥔","⇀":"⇀","⥓":"⥓","ℝ":"ℝ","⥰":"⥰","⇛":"⇛","ℛ":"ℛ","↱":"↱","⧴":"⧴","Щ":"Щ","Ш":"Ш","Ь":"Ь","Ś":"Ś","⪼":"⪼","Š":"Š","Ş":"Ş","Ŝ":"Ŝ","С":"С","𝔖":"𝔖","↑":"↑","Σ":"Σ","∘":"∘","𝕊":"𝕊","√":"√","□":"□","⊓":"⊓","⊏":"⊏","⊑":"⊑","⊐":"⊐","⊒":"⊒","⊔":"⊔","𝒮":"𝒮","⋆":"⋆","⋐":"⋐","⊆":"⊆","≻":"≻","⪰":"⪰","≽":"≽","≿":"≿","∑":"∑","⋑":"⋑","⊃":"⊃","⊇":"⊇","Þ":"Þ","™":"™","Ћ":"Ћ","Ц":"Ц","\t":" ","Τ":"Τ","Ť":"Ť","Ţ":"Ţ","Т":"Т","𝔗":"𝔗","∴":"∴","Θ":"Θ","  ":"  "," ":" ","∼":"∼","≃":"≃","≅":"≅","≈":"≈","𝕋":"𝕋","⃛":"⃛","𝒯":"𝒯","Ŧ":"Ŧ","Ú":"Ú","↟":"↟","⥉":"⥉","Ў":"Ў","Ŭ":"Ŭ","Û":"Û","У":"У","Ű":"Ű","𝔘":"𝔘","Ù":"Ù","Ū":"Ū",_:"_","⏟":"⏟","⎵":"⎵","⏝":"⏝","⋃":"⋃","⊎":"⊎","Ų":"Ų","𝕌":"𝕌","⤒":"⤒","⇅":"⇅","↕":"↕","⥮":"⥮","⊥":"⊥","↥":"↥","↖":"↖","↗":"↗","ϒ":"ϒ","Υ":"Υ","Ů":"Ů","𝒰":"𝒰","Ũ":"Ũ","Ü":"Ü","⊫":"⊫","⫫":"⫫","В":"В","⊩":"⊩","⫦":"⫦","⋁":"⋁","‖":"‖","∣":"∣","|":"|","❘":"❘","≀":"≀"," ":" 
","𝔙":"𝔙","𝕍":"𝕍","𝒱":"𝒱","⊪":"⊪","Ŵ":"Ŵ","⋀":"⋀","𝔚":"𝔚","𝕎":"𝕎","𝒲":"𝒲","𝔛":"𝔛","Ξ":"Ξ","𝕏":"𝕏","𝒳":"𝒳","Я":"Я","Ї":"Ї","Ю":"Ю","Ý":"Ý","Ŷ":"Ŷ","Ы":"Ы","𝔜":"𝔜","𝕐":"𝕐","𝒴":"𝒴","Ÿ":"Ÿ","Ж":"Ж","Ź":"Ź","Ž":"Ž","З":"З","Ż":"Ż","Ζ":"Ζ","ℨ":"ℨ","ℤ":"ℤ","𝒵":"𝒵","á":"á","ă":"ă","∾":"∾","∾̳":"∾̳","∿":"∿","â":"â","а":"а","æ":"æ","𝔞":"𝔞","à":"à","ℵ":"ℵ","α":"α","ā":"ā","⨿":"⨿","∧":"∧","⩕":"⩕","⩜":"⩜","⩘":"⩘","⩚":"⩚","∠":"∠","⦤":"⦤","∡":"∡","⦨":"⦨","⦩":"⦩","⦪":"⦪","⦫":"⦫","⦬":"⦬","⦭":"⦭","⦮":"⦮","⦯":"⦯","∟":"∟","⊾":"⊾","⦝":"⦝","∢":"∢","⍼":"⍼","ą":"ą","𝕒":"𝕒","⩰":"⩰","⩯":"⩯","≊":"≊","≋":"≋","'":"'","å":"å","𝒶":"𝒶","*":"*","ã":"ã","ä":"ä","⨑":"⨑","⫭":"⫭","≌":"≌","϶":"϶","‵":"‵","∽":"∽","⋍":"⋍","⊽":"⊽","⌅":"⌅","⎶":"⎶","б":"б","„":"„","⦰":"⦰","β":"β","ℶ":"ℶ","≬":"≬","𝔟":"𝔟","◯":"◯","⨀":"⨀","⨁":"⨁","⨂":"⨂","⨆":"⨆","★":"★","▽":"▽","△":"△","⨄":"⨄","⤍":"⤍","⧫":"⧫","▴":"▴","▾":"▾","◂":"◂","▸":"▸","␣":"␣","▒":"▒","░":"░","▓":"▓","█":"█","=⃥":"=⃥","≡⃥":"≡⃥","⌐":"⌐","𝕓":"𝕓","⋈":"⋈","╗":"╗","╔":"╔","╖":"╖","╓":"╓","═":"═","╦":"╦","╩":"╩","╤":"╤","╧":"╧","╝":"╝","╚":"╚","╜":"╜","╙":"╙","║":"║","╬":"╬","╣":"╣","╠":"╠","╫":"╫","╢":"╢","╟":"╟","⧉":"⧉","╕":"╕","╒":"╒","┐":"┐","┌":"┌","╥":"╥","╨":"╨","┬":"┬","┴":"┴","⊟":"⊟","⊞":"⊞","⊠":"⊠","╛":"╛","╘":"╘","┘":"┘","└":"└","│":"│","╪":"╪","╡":"╡","╞":"╞","┼":"┼","┤":"┤","├":"├","¦":"¦","𝒷":"𝒷","⁏":"⁏","\\":"\","⧅":"⧅","⟈":"⟈","•":"•","⪮":"⪮","ć":"ć","∩":"∩","⩄":"⩄","⩉":"⩉","⩋":"⩋","⩇":"⩇","⩀":"⩀","∩︀":"∩︀","⁁":"⁁","⩍":"⩍","č":"č","ç":"ç","ĉ":"ĉ","⩌":"⩌","⩐":"⩐","ċ":"ċ","⦲":"⦲","¢":"¢","𝔠":"𝔠","ч":"ч","✓":"✓","χ":"χ","○":"○","⧃":"⧃","ˆ":"ˆ","≗":"≗","↺":"↺","↻":"↻","Ⓢ":"Ⓢ","⊛":"⊛","⊚":"⊚","⊝":"⊝","⨐":"⨐","⫯":"⫯","⧂":"⧂","♣":"♣",":":":",",":",","@":"@","∁":"∁","⩭":"⩭","𝕔":"𝕔","℗":"℗","↵":"↵","✗":"✗","𝒸":"𝒸","⫏":"⫏","⫑":"⫑","⫐":"⫐","⫒":"⫒","⋯":"⋯","⤸":"⤸","⤵":"⤵","⋞":"⋞","⋟":"⋟","↶":"↶","⤽":"⤽","∪":"∪","⩈":"⩈","⩆":"⩆","⩊":"⩊","⊍":"⊍","⩅":"⩅","∪︀":"∪︀","↷":"↷","⤼":"⤼","⋎":"⋎","⋏":"⋏","¤":"¤","∱":"∱","⌭":"⌭","⥥":"⥥","†":"†","ℸ":"ℸ","‐":"‐","⤏":"⤏","ď":"ď","д":"д","⇊":"⇊","⩷":"⩷","°":"°","δ":"δ","⦱":"⦱","⥿":"⥿","𝔡":"𝔡","♦":"♦","ϝ":"ϝ","⋲":"⋲","÷":"÷","⋇":"⋇","ђ":"ђ","⌞":"⌞","⌍":"⌍",$:"$","𝕕":"𝕕","≑":"≑","∸":"∸","∔":"∔","⊡":"⊡","⌟":"⌟","⌌":"⌌","𝒹":"𝒹","ѕ":"ѕ","⧶":"⧶","đ":"đ","⋱":"⋱","▿":"▿","⦦":"⦦","џ":"џ","⟿":"⟿","é":"é","⩮":"⩮","ě":"ě","≖":"≖","ê":"ê","≕":"≕","э":"э","ė":"ė","≒":"≒","𝔢":"𝔢","⪚":"⪚","è":"è","⪖":"⪖","⪘":"⪘","⪙":"⪙","⏧":"⏧","ℓ":"ℓ","⪕":"⪕","⪗":"⪗","ē":"ē","∅":"∅"," ":" "," ":" "," ":" ","ŋ":"ŋ"," ":" 
","ę":"ę","𝕖":"𝕖","⋕":"⋕","⧣":"⧣","⩱":"⩱","ε":"ε","ϵ":"ϵ","=":"=","≟":"≟","⩸":"⩸","⧥":"⧥","≓":"≓","⥱":"⥱","ℯ":"ℯ","η":"η","ð":"ð","ë":"ë","€":"€","!":"!","ф":"ф","♀":"♀","ffi":"ffi","ff":"ff","ffl":"ffl","𝔣":"𝔣","fi":"fi",fj:"fj","♭":"♭","fl":"fl","▱":"▱","ƒ":"ƒ","𝕗":"𝕗","⋔":"⋔","⫙":"⫙","⨍":"⨍","½":"½","⅓":"⅓","¼":"¼","⅕":"⅕","⅙":"⅙","⅛":"⅛","⅔":"⅔","⅖":"⅖","¾":"¾","⅗":"⅗","⅜":"⅜","⅘":"⅘","⅚":"⅚","⅝":"⅝","⅞":"⅞","⁄":"⁄","⌢":"⌢","𝒻":"𝒻","⪌":"⪌","ǵ":"ǵ","γ":"γ","⪆":"⪆","ğ":"ğ","ĝ":"ĝ","г":"г","ġ":"ġ","⪩":"⪩","⪀":"⪀","⪂":"⪂","⪄":"⪄","⋛︀":"⋛︀","⪔":"⪔","𝔤":"𝔤","ℷ":"ℷ","ѓ":"ѓ","⪒":"⪒","⪥":"⪥","⪤":"⪤","≩":"≩","⪊":"⪊","⪈":"⪈","⋧":"⋧","𝕘":"𝕘","ℊ":"ℊ","⪎":"⪎","⪐":"⪐","⪧":"⪧","⩺":"⩺","⋗":"⋗","⦕":"⦕","⩼":"⩼","⥸":"⥸","≩︀":"≩︀","ъ":"ъ","⥈":"⥈","↭":"↭","ℏ":"ℏ","ĥ":"ĥ","♥":"♥","…":"…","⊹":"⊹","𝔥":"𝔥","⤥":"⤥","⤦":"⤦","⇿":"⇿","∻":"∻","↩":"↩","↪":"↪","𝕙":"𝕙","―":"―","𝒽":"𝒽","ħ":"ħ","⁃":"⁃","í":"í","î":"î","и":"и","е":"е","¡":"¡","𝔦":"𝔦","ì":"ì","⨌":"⨌","∭":"∭","⧜":"⧜","℩":"℩","ij":"ij","ī":"ī","ı":"ı","⊷":"⊷","Ƶ":"Ƶ","℅":"℅","∞":"∞","⧝":"⧝","⊺":"⊺","⨗":"⨗","⨼":"⨼","ё":"ё","į":"į","𝕚":"𝕚","ι":"ι","¿":"¿","𝒾":"𝒾","⋹":"⋹","⋵":"⋵","⋴":"⋴","⋳":"⋳","ĩ":"ĩ","і":"і","ï":"ï","ĵ":"ĵ","й":"й","𝔧":"𝔧","ȷ":"ȷ","𝕛":"𝕛","𝒿":"𝒿","ј":"ј","є":"є","κ":"κ","ϰ":"ϰ","ķ":"ķ","к":"к","𝔨":"𝔨","ĸ":"ĸ","х":"х","ќ":"ќ","𝕜":"𝕜","𝓀":"𝓀","⤛":"⤛","⤎":"⤎","⪋":"⪋","⥢":"⥢","ĺ":"ĺ","⦴":"⦴","λ":"λ","⦑":"⦑","⪅":"⪅","«":"«","⤟":"⤟","⤝":"⤝","↫":"↫","⤹":"⤹","⥳":"⥳","↢":"↢","⪫":"⪫","⤙":"⤙","⪭":"⪭","⪭︀":"⪭︀","⤌":"⤌","❲":"❲","{":"{","[":"[","⦋":"⦋","⦏":"⦏","⦍":"⦍","ľ":"ľ","ļ":"ļ","л":"л","⤶":"⤶","⥧":"⥧","⥋":"⥋","↲":"↲","≤":"≤","⇇":"⇇","⋋":"⋋","⪨":"⪨","⩿":"⩿","⪁":"⪁","⪃":"⪃","⋚︀":"⋚︀","⪓":"⪓","⋖":"⋖","⥼":"⥼","𝔩":"𝔩","⪑":"⪑","⥪":"⥪","▄":"▄","љ":"љ","⥫":"⥫","◺":"◺","ŀ":"ŀ","⎰":"⎰","≨":"≨","⪉":"⪉","⪇":"⪇","⋦":"⋦","⟬":"⟬","⇽":"⇽","⟼":"⟼","↬":"↬","⦅":"⦅","𝕝":"𝕝","⨭":"⨭","⨴":"⨴","∗":"∗","◊":"◊","(":"(","⦓":"⦓","⥭":"⥭","‎":"‎","⊿":"⊿","‹":"‹","𝓁":"𝓁","⪍":"⪍","⪏":"⪏","‚":"‚","ł":"ł","⪦":"⪦","⩹":"⩹","⋉":"⋉","⥶":"⥶","⩻":"⩻","⦖":"⦖","◃":"◃","⥊":"⥊","⥦":"⥦","≨︀":"≨︀","∺":"∺","¯":"¯","♂":"♂","✠":"✠","▮":"▮","⨩":"⨩","м":"м","—":"—","𝔪":"𝔪","℧":"℧","µ":"µ","⫰":"⫰","−":"−","⨪":"⨪","⫛":"⫛","⊧":"⊧","𝕞":"𝕞","𝓂":"𝓂","μ":"μ","⊸":"⊸","⋙̸":"⋙̸","≫⃒":"≫⃒","⇍":"⇍","⇎":"⇎","⋘̸":"⋘̸","≪⃒":"≪⃒","⇏":"⇏","⊯":"⊯","⊮":"⊮","ń":"ń","∠⃒":"∠⃒","⩰̸":"⩰̸","≋̸":"≋̸","ʼn":"ʼn","♮":"♮","⩃":"⩃","ň":"ň","ņ":"ņ","⩭̸":"⩭̸","⩂":"⩂","н":"н","–":"–","⇗":"⇗","⤤":"⤤","≐̸":"≐̸","⤨":"⤨","𝔫":"𝔫","↮":"↮","⫲":"⫲","⋼":"⋼","⋺":"⋺","њ":"њ","≦̸":"≦̸","↚":"↚","‥":"‥","𝕟":"𝕟","¬":"¬","⋹̸":"⋹̸","⋵̸":"⋵̸","⋷":"⋷","⋶":"⋶","⋾":"⋾","⋽":"⋽","⫽⃥":"⫽⃥","∂̸":"∂̸","⨔":"⨔","↛":"↛","⤳̸":"⤳̸","↝̸":"↝̸","𝓃":"𝓃","⊄":"⊄","⫅̸":"⫅̸","⊅":"⊅","⫆̸":"⫆̸","ñ":"ñ","ν":"ν","#":"#","№":"№"," ":" ","⊭":"⊭","⤄":"⤄","≍⃒":"≍⃒","⊬":"⊬","≥⃒":"≥⃒",">⃒":">⃒","⧞":"⧞","⤂":"⤂","≤⃒":"≤⃒","<⃒":"<⃒","⊴⃒":"⊴⃒","⤃":"⤃","⊵⃒":"⊵⃒","∼⃒":"∼⃒","⇖":"⇖","⤣":"⤣","⤧":"⤧","ó":"ó","ô":"ô","о":"о","ő":"ő","⨸":"⨸","⦼":"⦼","œ":"œ","⦿":"⦿","𝔬":"𝔬","˛":"˛","ò":"ò","⧁":"⧁","⦵":"⦵","⦾":"⦾","⦻":"⦻","⧀":"⧀","ō":"ō","ω":"ω","ο":"ο","⦶":"⦶","𝕠":"𝕠","⦷":"⦷","⦹":"⦹","∨":"∨","⩝":"⩝","ℴ":"ℴ","ª":"ª","º":"º","⊶":"⊶","⩖":"⩖","⩗":"⩗","⩛":"⩛","ø":"ø","⊘":"⊘","õ":"õ","⨶":"⨶","ö":"ö","⌽":"⌽","¶":"¶","⫳":"⫳","⫽":"⫽","п":"п","%":"%",".":".","‰":"‰","‱":"‱","𝔭":"𝔭","φ":"φ","ϕ":"ϕ","☎":"☎","π":"π","ϖ":"ϖ","ℎ":"ℎ","+":"+","⨣":"⨣","⨢":"⨢","⨥":"⨥","⩲":"⩲","⨦":"⨦","⨧":"⨧","⨕":"⨕","𝕡":"𝕡","£":"£","⪳":"⪳","⪷":"⪷","⪹":"⪹","⪵":"⪵","⋨":"⋨","′":"′","⌮":"⌮","⌒":"⌒","⌓":"⌓","⊰":"⊰","𝓅":"𝓅","ψ":"ψ"," ":" 
","𝔮":"𝔮","𝕢":"𝕢","⁗":"⁗","𝓆":"𝓆","⨖":"⨖","?":"?","⤜":"⤜","⥤":"⥤","∽̱":"∽̱","ŕ":"ŕ","⦳":"⦳","⦒":"⦒","⦥":"⦥","»":"»","⥵":"⥵","⤠":"⤠","⤳":"⤳","⤞":"⤞","⥅":"⥅","⥴":"⥴","↣":"↣","↝":"↝","⤚":"⤚","∶":"∶","❳":"❳","}":"}","]":"]","⦌":"⦌","⦎":"⦎","⦐":"⦐","ř":"ř","ŗ":"ŗ","р":"р","⤷":"⤷","⥩":"⥩","↳":"↳","▭":"▭","⥽":"⥽","𝔯":"𝔯","⥬":"⥬","ρ":"ρ","ϱ":"ϱ","⇉":"⇉","⋌":"⋌","˚":"˚","‏":"‏","⎱":"⎱","⫮":"⫮","⟭":"⟭","⇾":"⇾","⦆":"⦆","𝕣":"𝕣","⨮":"⨮","⨵":"⨵",")":")","⦔":"⦔","⨒":"⨒","›":"›","𝓇":"𝓇","⋊":"⋊","▹":"▹","⧎":"⧎","⥨":"⥨","℞":"℞","ś":"ś","⪴":"⪴","⪸":"⪸","š":"š","ş":"ş","ŝ":"ŝ","⪶":"⪶","⪺":"⪺","⋩":"⋩","⨓":"⨓","с":"с","⋅":"⋅","⩦":"⩦","⇘":"⇘","§":"§",";":";","⤩":"⤩","✶":"✶","𝔰":"𝔰","♯":"♯","щ":"щ","ш":"ш","­":"­","σ":"σ","ς":"ς","⩪":"⩪","⪞":"⪞","⪠":"⪠","⪝":"⪝","⪟":"⪟","≆":"≆","⨤":"⨤","⥲":"⥲","⨳":"⨳","⧤":"⧤","⌣":"⌣","⪪":"⪪","⪬":"⪬","⪬︀":"⪬︀","ь":"ь","/":"/","⧄":"⧄","⌿":"⌿","𝕤":"𝕤","♠":"♠","⊓︀":"⊓︀","⊔︀":"⊔︀","𝓈":"𝓈","☆":"☆","⊂":"⊂","⫅":"⫅","⪽":"⪽","⫃":"⫃","⫁":"⫁","⫋":"⫋","⊊":"⊊","⪿":"⪿","⥹":"⥹","⫇":"⫇","⫕":"⫕","⫓":"⫓","♪":"♪","¹":"¹","²":"²","³":"³","⫆":"⫆","⪾":"⪾","⫘":"⫘","⫄":"⫄","⟉":"⟉","⫗":"⫗","⥻":"⥻","⫂":"⫂","⫌":"⫌","⊋":"⊋","⫀":"⫀","⫈":"⫈","⫔":"⫔","⫖":"⫖","⇙":"⇙","⤪":"⤪","ß":"ß","⌖":"⌖","τ":"τ","ť":"ť","ţ":"ţ","т":"т","⌕":"⌕","𝔱":"𝔱","θ":"θ","ϑ":"ϑ","þ":"þ","×":"×","⨱":"⨱","⨰":"⨰","⌶":"⌶","⫱":"⫱","𝕥":"𝕥","⫚":"⫚","‴":"‴","▵":"▵","≜":"≜","◬":"◬","⨺":"⨺","⨹":"⨹","⧍":"⧍","⨻":"⨻","⏢":"⏢","𝓉":"𝓉","ц":"ц","ћ":"ћ","ŧ":"ŧ","⥣":"⥣","ú":"ú","ў":"ў","ŭ":"ŭ","û":"û","у":"у","ű":"ű","⥾":"⥾","𝔲":"𝔲","ù":"ù","▀":"▀","⌜":"⌜","⌏":"⌏","◸":"◸","ū":"ū","ų":"ų","𝕦":"𝕦","υ":"υ","⇈":"⇈","⌝":"⌝","⌎":"⌎","ů":"ů","◹":"◹","𝓊":"𝓊","⋰":"⋰","ũ":"ũ","ü":"ü","⦧":"⦧","⫨":"⫨","⫩":"⫩","⦜":"⦜","⊊︀":"⊊︀","⫋︀":"⫋︀","⊋︀":"⊋︀","⫌︀":"⫌︀","в":"в","⊻":"⊻","≚":"≚","⋮":"⋮","𝔳":"𝔳","𝕧":"𝕧","𝓋":"𝓋","⦚":"⦚","ŵ":"ŵ","⩟":"⩟","≙":"≙","℘":"℘","𝔴":"𝔴","𝕨":"𝕨","𝓌":"𝓌","𝔵":"𝔵","ξ":"ξ","⋻":"⋻","𝕩":"𝕩","𝓍":"𝓍","ý":"ý","я":"я","ŷ":"ŷ","ы":"ы","¥":"¥","𝔶":"𝔶","ї":"ї","𝕪":"𝕪","𝓎":"𝓎","ю":"ю","ÿ":"ÿ","ź":"ź","ž":"ž","з":"з","ż":"ż","ζ":"ζ","𝔷":"𝔷","ж":"ж","⇝":"⇝","𝕫":"𝕫","𝓏":"𝓏","‍":"‍","‌":"‌"}}}; +//# sourceMappingURL=./named-references.js.map -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
+/***/ }), + +/***/ 45439: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value:true}));exports.numericUnicodeMap={0:65533,128:8364,130:8218,131:402,132:8222,133:8230,134:8224,135:8225,136:710,137:8240,138:352,139:8249,140:338,142:381,145:8216,146:8217,147:8220,148:8221,149:8226,150:8211,151:8212,152:732,153:8482,154:353,155:8250,156:339,158:382,159:376}; +//# sourceMappingURL=./numeric-unicode-map.js.map + +/***/ }), + +/***/ 1454: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value:true}));exports.fromCodePoint=String.fromCodePoint||function(astralCodePoint){return String.fromCharCode(Math.floor((astralCodePoint-65536)/1024)+55296,(astralCodePoint-65536)%1024+56320)};exports.getCodePoint=String.prototype.codePointAt?function(input,position){return input.codePointAt(position)}:function(input,position){return(input.charCodeAt(position)-55296)*1024+input.charCodeAt(position+1)-56320+65536};exports.highSurrogateFrom=55296;exports.highSurrogateTo=56319; +//# sourceMappingURL=./surrogate-pairs.js.map + +/***/ }), + +/***/ 77492: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getErrorFromOAuthErrorResponse = exports.OAuthClientAuthHandler = void 0; -const querystring = __nccwpck_require__(3477); -const crypto_1 = __nccwpck_require__(8043); -/** List of HTTP methods that accept request bodies. */ -const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +const net_1 = __importDefault(__nccwpck_require__(41808)); +const tls_1 = __importDefault(__nccwpck_require__(24404)); +const url_1 = __importDefault(__nccwpck_require__(57310)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const once_1 = __importDefault(__nccwpck_require__(81040)); +const agent_base_1 = __nccwpck_require__(97635); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} /** - * Abstract class for handling client authentication in OAuth-based - * operations. - * When request-body client authentication is used, only application/json and - * application/x-www-form-urlencoded content types for HTTP methods that support - * request bodies are supported. + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public */ -class OAuthClientAuthHandler { - /** - * Instantiates an OAuth client authentication handler. - * @param clientAuthentication The client auth credentials. 
- */ - constructor(clientAuthentication) { - this.clientAuthentication = clientAuthentication; - this.crypto = (0, crypto_1.createCrypto)(); - } - /** - * Applies client authentication on the OAuth request's headers or POST - * body but does not process the request. - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - applyClientAuthenticationOptions(opts, bearerToken) { - // Inject authenticated header. - this.injectAuthenticatedHeaders(opts, bearerToken); - // Inject authenticated request body. - if (!bearerToken) { - this.injectAuthenticatedRequestBody(opts); +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); } - } - /** - * Applies client authentication on the request's header if either - * basic authentication or bearer token authentication is selected. - * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. - * @param bearerToken The optional bearer token to use for authentication. - * When this is used, no client authentication credentials are needed. - */ - injectAuthenticatedHeaders(opts, bearerToken) { - var _a; - // Bearer token prioritized higher than basic Auth. - if (bearerToken) { - opts.headers = opts.headers || {}; - Object.assign(opts.headers, { - Authorization: `Bearer ${bearerToken}}`, - }); + else { + opts = _opts; } - else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { - opts.headers = opts.headers || {}; - const clientId = this.clientAuthentication.clientId; - const clientSecret = this.clientAuthentication.clientSecret || ''; - const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); - Object.assign(opts.headers, { - Authorization: `Basic ${base64EncodedCreds}`, - }); + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; } + this.proxy = proxy; } /** - * Applies client authentication on the request's body if request-body - * client authentication is selected. + * Called when the node-core HTTP client library is creating a + * new HTTP request. * - * @param opts The GaxiosOptions whose headers or data are to be modified - * depending on the client authentication mechanism to be used. 
+ * @api protected */ - injectAuthenticatedRequestBody(opts) { - var _a; - if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { - const method = (opts.method || 'GET').toUpperCase(); - // Inject authenticated request body. - if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { - // Get content-type. - let contentType; - const headers = opts.headers || {}; - for (const key in headers) { - if (key.toLowerCase() === 'content-type' && headers[key]) { - contentType = headers[key].toLowerCase(); - break; - } - } - if (contentType === 'application/x-www-form-urlencoded') { - opts.data = opts.data || ''; - const data = querystring.parse(opts.data); - Object.assign(data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - opts.data = querystring.stringify(data); - } - else if (contentType === 'application/json') { - opts.data = opts.data || {}; - Object.assign(opts.data, { - client_id: this.clientAuthentication.clientId, - client_secret: this.clientAuthentication.clientSecret || '', - }); - } - else { - throw new Error(`${contentType} content-types are not supported with ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); - } + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); } else { - throw new Error(`${method} HTTP method does not support ` + - `${this.clientAuthentication.confidentialClientType} ` + - 'client authentication'); + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); } - } - } - /** - * Retry config for Auth-related requests. - * - * @remarks - * - * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} - * config as some downstream APIs would prefer if customers explicitly enable retries, - * such as GCS. - */ - static get RETRY_CONFIG() { - return { - retry: true, - retryConfig: { - httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], - }, - }; - } -} -exports.OAuthClientAuthHandler = OAuthClientAuthHandler; -/** - * Converts an OAuth error response to a native JavaScript Error. - * @param resp The OAuth error response to convert to a native Error object. - * @param err The optional original error. If provided, the error properties - * will be copied to the new error. - * @return The converted native Error object. - */ -function getErrorFromOAuthErrorResponse(resp, err) { - // Error response. 
- const errorCode = resp.error; - const errorDescription = resp.error_description; - const errorUri = resp.error_uri; - let message = `Error code ${errorCode}`; - if (typeof errorDescription !== 'undefined') { - message += `: ${errorDescription}`; - } - if (typeof errorUri !== 'undefined') { - message += ` - ${errorUri}`; - } - const newError = new Error(message); - // Copy properties from original error to newly generated error. - if (err) { - const keys = Object.keys(err); - if (err.stack) { - // Copy error.stack if available. - keys.push('stack'); - } - keys.forEach(key => { - // Do not overwrite the message field. - if (key !== 'message') { - Object.defineProperty(newError, key, { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - value: err[key], - writable: false, - enumerable: true, - }); + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; }); } - return newError; } -exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; - +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map /***/ }), -/***/ 2460: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 23764: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PassThroughClient = void 0; -const authclient_1 = __nccwpck_require__(4627); -/** - * An AuthClient without any Authentication information. 
Useful for: - * - Anonymous access - * - Local Emulators - * - Testing Environments - * - */ -class PassThroughClient extends authclient_1.AuthClient { - /** - * Creates a request without any authentication headers or checks. - * - * @remarks - * - * In testing environments it may be useful to change the provided - * {@link AuthClient.transporter} for any desired request overrides/handling. - * - * @param opts - * @returns The response of the request. - */ - async request(opts) { - return this.transporter.request(opts); - } - /** - * A required method of the base class. - * Always will return an empty object. - * - * @returns {} - */ - async getAccessToken() { - return {}; - } - /** - * A required method of the base class. - * Always will return an empty object. - * - * @returns {} - */ - async getRequestHeaders() { - return {}; - } +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(77492)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); } -exports.PassThroughClient = PassThroughClient; -const a = new PassThroughClient(); -a.getAccessToken(); - +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 4782: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 97635: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthClient = exports.ExecutableError = void 0; -const baseexternalclient_1 = __nccwpck_require__(7391); -const executable_response_1 = __nccwpck_require__(8749); -const pluggable_auth_handler_1 = __nccwpck_require__(8941); -/** - * Error thrown from the executable run by PluggableAuthClient. - */ -class ExecutableError extends Error { - constructor(message, code) { - super(`The executable failed with exit code: ${code} and error message: ${message}.`); - this.code = code; - Object.setPrototypeOf(this, new.target.prototype); - } +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(82361); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const promisify_1 = __importDefault(__nccwpck_require__(79965)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; } -exports.ExecutableError = ExecutableError; -/** - * The default executable timeout when none is provided, in milliseconds. 
- */ -const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; -/** - * The minimum allowed executable timeout in milliseconds. - */ -const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; -/** - * The maximum allowed executable timeout in milliseconds. - */ -const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; -/** - * The environment variable to check to see if executable can be run. - * Value must be set to '1' for the executable to run. - */ -const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; -/** - * The maximum currently supported executable version. - */ -const MAXIMUM_EXECUTABLE_VERSION = 1; -/** - * PluggableAuthClient enables the exchange of workload identity pool external credentials for - * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These - * scripts/executables are completely independent of the Google Cloud Auth libraries. These - * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token - * to be exchanged for a Google access token. - * - *

- * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
- * must be set to '1'. This is for security reasons.
- *
- * Both OIDC and SAML are supported. The executable must adhere to a specific response format
- * defined below.
- *
- * The executable must print out the 3rd party token to STDOUT in JSON format. When an
- * output_file is specified in the credential configuration, the executable must also handle writing the
- * JSON response to this file.
- *
- * OIDC response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
- *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
- *   "expiration_time": 1620433341
- * }
- *
- * SAML2 response sample:
- * {
- *   "version": 1,
- *   "success": true,
- *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
- *   "saml_response": "...",
- *   "expiration_time": 1620433341
- * }
- *
- * Error response sample:
- * {
- *   "version": 1,
- *   "success": false,
- *   "code": "401",
- *   "message": "Error message."
- * }
- *
- * The "expiration_time" field in the JSON response is only required for successful
- * responses when an output file was specified in the credential configuration
- *
- * The auth libraries will populate certain environment variables that will be accessible by the
- * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
- * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
- * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
- *
Please see this repositories README for a complete executable request/response specification. - */ -class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { /** - * Instantiates a PluggableAuthClient instance using the provided JSON - * object loaded from an external account credentials file. - * An error is thrown if the credential is not a valid pluggable auth credential. - * @param options The external account options object typically loaded from - * the external account JSON credential file. - * @param additionalOptions **DEPRECATED, all options are available in the - * `options` parameter.** Optional additional behavior customization options. - * These currently customize expiration threshold time and whether to retry - * on 401/403 API request errors. + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public */ - constructor(options, additionalOptions) { - super(options, additionalOptions); - if (!options.credential_source.executable) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; } - this.command = options.credential_source.executable.command; - if (!this.command) { - throw new Error('No valid Pluggable Auth "credential_source" provided.'); + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; } - // Check if the provided timeout exists and if it is valid. - if (options.credential_source.executable.timeout_millis === undefined) { - this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + set defaultPort(v) { + this.explicitDefaultPort = v; } - else { - this.timeoutMillis = options.credential_source.executable.timeout_millis; - if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || - this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { - throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + - `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; } + return isSecureEndpoint() ? 
'https:' : 'http:'; } - this.outputFile = options.credential_source.executable.output_file; - this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ - command: this.command, - timeoutMillis: this.timeoutMillis, - outputFile: this.outputFile, - }); - this.credentialSourceType = 'executable'; - } - /** - * Triggered when an external subject token is needed to be exchanged for a - * GCP access token via GCP STS endpoint. - * This uses the `options.credential_source` object to figure out how - * to retrieve the token using the current environment. In this case, - * this calls a user provided executable which returns the subject token. - * The logic is summarized as: - * 1. Validated that the executable is allowed to run. The - * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to - * 1 for security reasons. - * 2. If an output file is specified by the user, check the file location - * for a response. If the file exists and contains a valid response, - * return the subject token from the file. - * 3. Call the provided executable and return response. - * @return A promise that resolves with the external subject token. - */ - async retrieveSubjectToken() { - // Check if the executable is allowed to run. - if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { - throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + - 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + - 'Variable to 1.'); + set protocol(v) { + this.explicitProtocol = v; } - let executableResponse = undefined; - // Try to get cached executable response from output file. - if (this.outputFile) { - executableResponse = await this.handler.retrieveCachedResponse(); + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); } - // If no response from output file, call the executable. - if (!executableResponse) { - // Set up environment map with required values for the executable. - const envMap = new Map(); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); - // Always set to 0 because interactive mode is not supported. - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); - if (this.outputFile) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. 
+ delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); } - const serviceAccountEmail = this.getServiceAccountEmail(); - if (serviceAccountEmail) { - envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); } - executableResponse = - await this.handler.retrieveResponseFromExecutable(envMap); - } - if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { - throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); - } - // Check that response was successful. - if (!executableResponse.success) { - throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); - } - // Check that response contains expiration time if output file was specified. - if (this.outputFile) { - if (!executableResponse.expirationTime) { - throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + catch (err) { + Promise.reject(err).catch(callbackError); } } - // Check that response is not expired. 
- if (executableResponse.isExpired()) { - throw new Error('Executable response is expired.'); + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); } - // Return subject token from response. - return executableResponse.subjectToken; } -} -exports.PluggableAuthClient = PluggableAuthClient; - + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 8941: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 79965: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PluggableAuthHandler = void 0; -const pluggable_auth_client_1 = __nccwpck_require__(4782); -const executable_response_1 = __nccwpck_require__(8749); -const childProcess = __nccwpck_require__(2081); -const fs = __nccwpck_require__(7147); -/** - * A handler used to retrieve 3rd party token responses from user defined - * executables and cached file output for the PluggableAuthClient class. - */ -class PluggableAuthHandler { - /** - * Instantiates a PluggableAuthHandler instance using the provided - * PluggableAuthHandlerOptions object. - */ - constructor(options) { - if (!options.command) { - throw new Error('No command provided.'); - } - this.commandComponents = PluggableAuthHandler.parseCommand(options.command); - this.timeoutMillis = options.timeoutMillis; - if (!this.timeoutMillis) { - throw new Error('No timeoutMillis provided.'); - } - this.outputFile = options.outputFile; - } - /** - * Calls user provided executable to get a 3rd party subject token and - * returns the response. - * @param envMap a Map of additional Environment Variables required for - * the executable. - * @return A promise that resolves with the executable response. - */ - retrieveResponseFromExecutable(envMap) { +function promisify(fn) { + return function (req, opts) { return new Promise((resolve, reject) => { - // Spawn process to run executable using added environment variables. - const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { - env: { ...process.env, ...Object.fromEntries(envMap) }, - }); - let output = ''; - // Append stdout to output as executable runs. - child.stdout.on('data', (data) => { - output += data; - }); - // Append stderr as executable runs. - child.stderr.on('data', (err) => { - output += err; - }); - // Set up a timeout to end the child process and throw an error. - const timeout = setTimeout(() => { - // Kill child process and remove listeners so 'close' event doesn't get - // read after child process is killed. 
- child.removeAllListeners(); - child.kill(); - return reject(new Error('The executable failed to finish within the timeout specified.')); - }, this.timeoutMillis); - child.on('close', (code) => { - // Cancel timeout if executable closes before timeout is reached. - clearTimeout(timeout); - if (code === 0) { - // If the executable completed successfully, try to return the parsed response. - try { - const responseJson = JSON.parse(output); - const response = new executable_response_1.ExecutableResponse(responseJson); - return resolve(response); - } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - return reject(error); - } - return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); - } + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); } else { - return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + resolve(rtn); } }); }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 77219: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const agent_base_1 = __nccwpck_require__(70694); +const url_1 = __nccwpck_require__(57310); +const parse_proxy_response_1 = __nccwpck_require__(595); +const debug = (0, debug_1.default)('https-proxy-agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. 
+ */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; } /** - * Checks user provided output file for response from previous run of - * executable and return the response if it exists, is formatted correctly, and is not expired. + * Called when the node-core HTTP client library is creating a + * new HTTP request. */ - async retrieveCachedResponse() { - if (!this.outputFile || this.outputFile.length === 0) { - return undefined; + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); } - let filePath; - try { - filePath = await fs.promises.realpath(this.outputFile); + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername: servername && net.isIP(servername) ? undefined : servername, + }); } - catch (_a) { - // If file path cannot be resolved, return undefined. - return undefined; + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); } - if (!(await fs.promises.lstat(filePath)).isFile()) { - // If path does not lead to file, return undefined. - return undefined; + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; } - const responseString = await fs.promises.readFile(filePath, { - encoding: 'utf8', - }); - if (responseString === '') { - return undefined; + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; } - try { - const responseJson = JSON.parse(responseString); - const response = new executable_response_1.ExecutableResponse(responseJson); - // Check if response is successful and unexpired. 
- if (response.isValid()) { - return new executable_response_1.ExecutableResponse(responseJson); - } - return undefined; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; } - catch (error) { - if (error instanceof executable_response_1.ExecutableResponseError) { - throw error; + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); } - throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + return socket; } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; } - /** - * Parses given command string into component array, splitting on spaces unless - * spaces are between quotation marks. - */ - static parseCommand(command) { - // Split the command into components by splitting on spaces, - // unless spaces are contained in quotation marks. - const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); - if (!components) { - throw new Error(`Provided command: "${command}" could not be parsed.`); - } - // Remove quotation marks from the beginning and end of each component if they are present. 
- for (let i = 0; i < components.length; i++) { - if (components[i][0] === '"' && components[i].slice(-1) === '"') { - components[i] = components[i].slice(1, -1); - } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } - return components; } + return ret; } -exports.PluggableAuthHandler = PluggableAuthHandler; - +//# sourceMappingURL=index.js.map /***/ }), -/***/ 8790: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 595: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright 2015 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; -const oauth2client_1 = __nccwpck_require__(3936); -const querystring_1 = __nccwpck_require__(3477); -exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; -class UserRefreshClient extends oauth2client_1.OAuth2Client { - constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { - const opts = optionsOrClientId && typeof optionsOrClientId === 'object' - ? optionsOrClientId - : { - clientId: optionsOrClientId, - clientSecret, - refreshToken, - eagerRefreshThresholdMillis, - forceRefreshOnFailure, - }; - super(opts); - this._refreshToken = opts.refreshToken; - this.credentials.refresh_token = opts.refreshToken; - } - /** - * Refreshes the access token. - * @param refreshToken An ignored refreshToken.. - * @param callback Optional callback. - */ - async refreshTokenNoCache( - // eslint-disable-next-line @typescript-eslint/no-unused-vars - refreshToken) { - return super.refreshTokenNoCache(this._refreshToken); - } - async fetchIdToken(targetAudience) { - const res = await this.transporter.request({ - ...UserRefreshClient.RETRY_CONFIG, - url: this.endpoints.oauth2TokenUrl, - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, - method: 'POST', - data: (0, querystring_1.stringify)({ - client_id: this._clientId, - client_secret: this._clientSecret, - grant_type: 'refresh_token', - refresh_token: this._refreshToken, - target_audience: targetAudience, - }), - }); - return res.data.id_token; - } - /** - * Create a UserRefreshClient credentials instance using the given input - * options. - * @param json The input object. 
- */ - fromJSON(json) { - if (!json) { - throw new Error('Must pass in a JSON object containing the user refresh token'); - } - if (json.type !== 'authorized_user') { - throw new Error('The incoming JSON object does not have the "authorized_user" type'); - } - if (!json.client_id) { - throw new Error('The incoming JSON object does not contain a client_id field'); - } - if (!json.client_secret) { - throw new Error('The incoming JSON object does not contain a client_secret field'); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); } - if (!json.refresh_token) { - throw new Error('The incoming JSON object does not contain a refresh_token field'); + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); } - this._clientId = json.client_id; - this._clientSecret = json.client_secret; - this._refreshToken = json.refresh_token; - this.credentials.refresh_token = json.refresh_token; - this.quotaProjectId = json.quota_project_id; - this.universeDomain = json.universe_domain || this.universeDomain; - } - fromStream(inputStream, callback) { - if (callback) { - this.fromStreamAsync(inputStream).then(() => callback(), callback); + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); } - else { - return this.fromStreamAsync(inputStream); + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); } - } - async fromStreamAsync(inputStream) { - return new Promise((resolve, reject) => { - if (!inputStream) { - return reject(new Error('Must pass in a stream containing the user refresh token.')); + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; } - let s = ''; - inputStream - .setEncoding('utf8') - .on('error', reject) - .on('data', chunk => (s += chunk)) - .on('end', () => { - try { - const data = JSON.parse(s); - this.fromJSON(data); - return resolve(); + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); } - catch (err) { - 
return reject(err); + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, }); - }); - } + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); } -exports.UserRefreshClient = UserRefreshClient; - +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map /***/ }), -/***/ 6308: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 44124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +try { + var util = __nccwpck_require__(73837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); +} -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.StsCredentials = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const querystring = __nccwpck_require__(3477); -const transporters_1 = __nccwpck_require__(2649); -const oauth2common_1 = __nccwpck_require__(9510); -/** - * Implements the OAuth 2.0 token exchange based on - * https://tools.ietf.org/html/rfc8693 - */ -class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { - /** - * Initializes an STS credentials instance. - * @param tokenExchangeEndpoint The token exchange endpoint. - * @param clientAuthentication The client authentication credentials if - * available. - */ - constructor(tokenExchangeEndpoint, clientAuthentication) { - super(clientAuthentication); - this.tokenExchangeEndpoint = tokenExchangeEndpoint; - this.transporter = new transporters_1.DefaultTransporter(); - } - /** - * Exchanges the provided token for another type of token based on the - * rfc8693 spec. - * @param stsCredentialsOptions The token exchange options used to populate - * the token exchange request. - * @param additionalHeaders Optional additional headers to pass along the - * request. - * @param options Optional additional GCP-specific non-spec defined options - * to send with the request. - * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` - * @return A promise that resolves with the token exchange response containing - * the requested token and its expiration time. 
- */ - async exchangeToken(stsCredentialsOptions, additionalHeaders, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options) { - var _a, _b, _c; - const values = { - grant_type: stsCredentialsOptions.grantType, - resource: stsCredentialsOptions.resource, - audience: stsCredentialsOptions.audience, - scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), - requested_token_type: stsCredentialsOptions.requestedTokenType, - subject_token: stsCredentialsOptions.subjectToken, - subject_token_type: stsCredentialsOptions.subjectTokenType, - actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, - actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, - // Non-standard GCP-specific options. - options: options && JSON.stringify(options), - }; - // Remove undefined fields. - Object.keys(values).forEach(key => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if (typeof values[key] === 'undefined') { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - delete values[key]; - } - }); - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - }; - // Inject additional STS headers if available. - Object.assign(headers, additionalHeaders || {}); - const opts = { - ...StsCredentials.RETRY_CONFIG, - url: this.tokenExchangeEndpoint.toString(), - method: 'POST', - headers, - data: querystring.stringify(values), - responseType: 'json', - }; - // Apply OAuth client authentication. - this.applyClientAuthenticationOptions(opts); - try { - const response = await this.transporter.request(opts); - // Successful response. - const stsSuccessfulResponse = response.data; - stsSuccessfulResponse.res = response; - return stsSuccessfulResponse; - } - catch (error) { - // Translate error to OAuthError. - if (error instanceof gaxios_1.GaxiosError && error.response) { - throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, - // Preserve other fields from the original error. - error); - } - // Request could fail before the server responds. - throw error; + +/***/ }), + +/***/ 8544: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } + } } -exports.StsCredentials = StsCredentials; /***/ }), -/***/ 7428: -/***/ ((__unused_webpack_module, exports) => { +/***/ 41554: +/***/ ((module) => { "use strict"; -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UrlSubjectTokenSupplier = void 0; -/** - * Internal subject token supplier implementation used when a URL - * is configured in the credential configuration used to build an {@link IdentityPoolClient} - */ -class UrlSubjectTokenSupplier { - /** - * Instantiates a URL subject token supplier. - * @param opts The URL subject token supplier options to build the supplier with. - */ - constructor(opts) { - this.url = opts.url; - this.formatType = opts.formatType; - this.subjectTokenFieldName = opts.subjectTokenFieldName; - this.headers = opts.headers; - this.additionalGaxiosOptions = opts.additionalGaxiosOptions; - } - /** - * Sends a GET request to the URL provided in the constructor and resolves - * with the returned external subject token. - * @param context {@link ExternalAccountSupplierContext} from the calling - * {@link IdentityPoolClient}, contains the requested audience and subject - * token type for the external account identity. Not used. - */ - async getSubjectToken(context) { - const opts = { - ...this.additionalGaxiosOptions, - url: this.url, - method: 'GET', - headers: this.headers, - responseType: this.formatType, - }; - let subjectToken; - if (this.formatType === 'text') { - const response = await context.transporter.request(opts); - subjectToken = response.data; - } - else if (this.formatType === 'json' && this.subjectTokenFieldName) { - const response = await context.transporter.request(opts); - subjectToken = response.data[this.subjectTokenFieldName]; - } - if (!subjectToken) { - throw new Error('Unable to parse the subject_token from the credential_source URL'); - } - return subjectToken; - } -} -exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; /***/ }), -/***/ 4693: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 55031: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var json_stringify = (__nccwpck_require__(78574).stringify); +var json_parse = __nccwpck_require__(89099); -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BrowserCrypto = void 0; -// This file implements crypto functions we need using in-browser -// SubtleCrypto interface `window.crypto.subtle`. -const base64js = __nccwpck_require__(6463); -const crypto_1 = __nccwpck_require__(8043); -class BrowserCrypto { - constructor() { - if (typeof window === 'undefined' || - window.crypto === undefined || - window.crypto.subtle === undefined) { - throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); - } - } - async sha256DigestBase64(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. - const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return base64js.fromByteArray(new Uint8Array(outputBuffer)); - } - randomBytesBase64(count) { - const array = new Uint8Array(count); - window.crypto.getRandomValues(array); - return base64js.fromByteArray(array); - } - static padBase64(base64) { - // base64js requires padding, so let's add some '=' - while (base64.length % 4 !== 0) { - base64 += '='; - } - return base64; - } - async verify(pubkey, data, signature) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, - }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); - const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); - // SubtleCrypto's verify method is async so we must make - // this method async as well. - const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); - return result; - } - async sign(privateKey, data) { - const algo = { - name: 'RSASSA-PKCS1-v1_5', - hash: { name: 'SHA-256' }, - }; - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const dataArray = new TextEncoder().encode(data); - const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); - // SubtleCrypto's sign method is async so we must make - // this method async as well. 
- const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); - return base64js.fromByteArray(new Uint8Array(result)); - } - decodeBase64StringUtf8(base64) { - const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const result = new TextDecoder().decode(uint8array); - return result; - } - encodeBase64StringUtf8(text) { - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const uint8array = new TextEncoder().encode(text); - const result = base64js.fromByteArray(uint8array); - return result; - } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - // SubtleCrypto digest() method is async, so we must make - // this method async as well. - // To calculate SHA256 digest using SubtleCrypto, we first - // need to convert an input string to an ArrayBuffer: - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const inputBuffer = new TextEncoder().encode(str); - // Result is ArrayBuffer as well. - const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); - return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); - } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - // Convert key, if provided in ArrayBuffer format, to string. - const rawKey = typeof key === 'string' - ? key - : String.fromCharCode(...new Uint16Array(key)); - // eslint-disable-next-line node/no-unsupported-features/node-builtins - const enc = new TextEncoder(); - const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { - name: 'HMAC', - hash: { - name: 'SHA-256', - }, - }, false, ['sign']); - return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify } -} -exports.BrowserCrypto = BrowserCrypto; +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; /***/ }), -/***/ 8043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 89099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var BigNumber = null; -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-/* global window */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.fromArrayBufferToHex = exports.hasBrowserCrypto = exports.createCrypto = void 0; -const crypto_1 = __nccwpck_require__(4693); -const crypto_2 = __nccwpck_require__(757); -function createCrypto() { - if (hasBrowserCrypto()) { - return new crypto_1.BrowserCrypto(); - } - return new crypto_2.NodeCrypto(); -} -exports.createCrypto = createCrypto; -function hasBrowserCrypto() { - return (typeof window !== 'undefined' && - typeof window.crypto !== 'undefined' && - typeof window.crypto.subtle !== 'undefined'); -} -exports.hasBrowserCrypto = hasBrowserCrypto; -/** - * Converts an ArrayBuffer to a hexadecimal string. - * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. - * @return The hexadecimal encoding of the ArrayBuffer. - */ -function fromArrayBufferToHex(arrayBuffer) { - // Convert buffer to byte array. - const byteArray = Array.from(new Uint8Array(arrayBuffer)); - // Convert bytes to hex string. - return byteArray - .map(byte => { - return byte.toString(16).padStart(2, '0'); - }) - .join(''); -} -exports.fromArrayBufferToHex = fromArrayBufferToHex; +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + This is a reference implementation. You are free to copy, modify, or + redistribute. -/***/ }), + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html -/***/ 757: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. 
+*/ -"use strict"; +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NodeCrypto = void 0; -const crypto = __nccwpck_require__(6113); -class NodeCrypto { - async sha256DigestBase64(str) { - return crypto.createHash('sha256').update(str).digest('base64'); - } - randomBytesBase64(count) { - return crypto.randomBytes(count).toString('base64'); - } - async verify(pubkey, data, signature) { - const verifier = crypto.createVerify('RSA-SHA256'); - verifier.update(data); - verifier.end(); - return verifier.verify(pubkey, signature, 'base64'); - } - async sign(privateKey, data) { - const signer = crypto.createSign('RSA-SHA256'); - signer.update(data); - signer.end(); - return signer.sign(privateKey, 'base64'); - } - decodeBase64StringUtf8(base64) { - return Buffer.from(base64, 'base64').toString('utf-8'); +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; } - encodeBase64StringUtf8(text) { - return Buffer.from(text, 'utf-8').toString('base64'); + if (options.storeAsString === true) { + _options.storeAsString = true; } - /** - * Computes the SHA-256 hash of the provided string. - * @param str The plain text string to hash. - * @return A promise that resolves with the SHA-256 hash of the provided - * string in hexadecimal encoding. - */ - async sha256DigestHex(str) { - return crypto.createHash('sha256').update(str).digest('hex'); + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? 
options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } } - /** - * Computes the HMAC hash of a message using the provided crypto key and the - * SHA-256 algorithm. - * @param key The secret crypto key in utf-8 or ArrayBuffer format. - * @param msg The plain text message. - * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer - * format. - */ - async signWithHmacSha256(key, msg) { - const cryptoKey = typeof key === 'string' ? key : toBuffer(key); - return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } } -} -exports.NodeCrypto = NodeCrypto; -/** - * Converts a Node.js Buffer to an ArrayBuffer. - * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer - * @param buffer The Buffer input to covert. - * @return The ArrayBuffer representation of the input. - */ -function toArrayBuffer(buffer) { - return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); -} -/** - * Converts an ArrayBuffer to a Node.js Buffer. - * @param arrayBuffer The ArrayBuffer input to covert. - * @return The Buffer representation of the input. - */ -function toBuffer(arrayBuffer) { - return Buffer.from(arrayBuffer); -} + } + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. -/***/ }), + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. -/***/ 810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } -"use strict"; + // Get the next character. When there are no more characters, + // return the empty string. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; -// Copyright 2017 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -const googleauth_1 = __nccwpck_require__(695); -Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); -// Export common deps to ensure types/instances are the exact match. Useful -// for consistently configuring the library across versions. -exports.gcpMetadata = __nccwpck_require__(3563); -exports.gaxios = __nccwpck_require__(9555); -var authclient_1 = __nccwpck_require__(4627); -Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); -Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); -var computeclient_1 = __nccwpck_require__(6875); -Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); -var envDetect_1 = __nccwpck_require__(1380); -Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); -var iam_1 = __nccwpck_require__(9735); -Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); -var idtokenclient_1 = __nccwpck_require__(298); -Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); -var jwtaccess_1 = __nccwpck_require__(8740); -Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); -var jwtclient_1 = __nccwpck_require__(3959); -Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); -var impersonated_1 = __nccwpck_require__(1103); -Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); -var oauth2client_1 = __nccwpck_require__(3936); -Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); -Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); -var loginticket_1 = __nccwpck_require__(4524); 
-Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); -var refreshclient_1 = __nccwpck_require__(8790); -Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); -var awsclient_1 = __nccwpck_require__(1569); -Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); -var identitypoolclient_1 = __nccwpck_require__(117); -Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); -var externalclient_1 = __nccwpck_require__(4381); -Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); -var baseexternalclient_1 = __nccwpck_require__(7391); -Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); -var downscopedclient_1 = __nccwpck_require__(6270); -Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); -var pluggable_auth_client_1 = __nccwpck_require__(4782); -Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); -var passthrough_1 = __nccwpck_require__(2460); -Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); -var transporters_1 = __nccwpck_require__(2649); -Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); -const auth = new googleauth_1.GoogleAuth(); -exports.auth = auth; + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. + var number, + string = ''; -/***/ }), + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(87558); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. -/***/ 6608: -/***/ ((__unused_webpack_module, exports) => { + var hex, + i, + string = '', + uffff; -"use strict"; + // When parsing for string values, we must look for " and \ characters. -// Copyright 2017 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.validate = void 0; -// Accepts an options object passed from the user to the API. In the -// previous version of the API, it referred to a `Request` options object. -// Now it refers to an Axiox Request Config object. This is here to help -// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function validate(options) { - const vpairs = [ - { invalid: 'uri', expected: 'url' }, - { invalid: 'json', expected: 'data' }, - { invalid: 'qs', expected: 'params' }, - ]; - for (const pair of vpairs) { - if (options[pair.invalid]) { - const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; - throw new Error(e); + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } } - } -} -exports.validate = validate; + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. -/***/ }), + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. -/***/ 2649: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var array = []; -"use strict"; + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultTransporter = void 0; -const gaxios_1 = __nccwpck_require__(9555); -const options_1 = __nccwpck_require__(6608); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const pkg = __nccwpck_require__(1402); -const PRODUCT_NAME = 'google-api-nodejs-client'; -class DefaultTransporter { - constructor() { - /** - * A configurable, replacable `Gaxios` instance. - */ - this.instance = new gaxios_1.Gaxios(); - } - /** - * Configures request options before making a request. - * @param opts GaxiosOptions options. - * @return Configured options. - */ - configure(opts = {}) { - opts.headers = opts.headers || {}; - if (typeof window === 'undefined') { - // set transporter user agent if not in browser - const uaValue = opts.headers['User-Agent']; - if (!uaValue) { - opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; - } - else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { - opts.headers['User-Agent'] = - `${uaValue} ${DefaultTransporter.USER_AGENT}`; - } - // track google-auth-library-nodejs version: - if (!opts.headers['x-goog-api-client']) { - const nodeVersion = process.version.replace(/^v/, ''); - opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; - } + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object } - return opts; - } - /** - * Makes a request using Gaxios with given options. - * @param opts GaxiosOptions options. - * @param callback optional callback that contains GaxiosResponse object. - * @return GaxiosPromise, assuming no callback is passed. - */ - request(opts) { - // ensure the user isn't passing in request-style options - opts = this.configure(opts); - (0, options_1.validate)(opts); - return this.instance.request(opts).catch(e => { - throw this.processError(e); - }); - } - get defaults() { - return this.instance.defaults; - } - set defaults(opts) { - this.instance.defaults = opts; - } - /** - * Changes the error to include details from the body. - */ - processError(e) { - const res = e.response; - const err = e; - const body = res ? 
res.data : null; - if (res && body && body.error && res.status !== 200) { - if (typeof body.error === 'string') { - err.message = body.error; - err.status = res.status; - } - else if (Array.isArray(body.error.errors)) { - err.message = body.error.errors - .map((err2) => err2.message) - .join('\n'); - err.code = body.error.code; - err.errors = body.error.errors; + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); } - else { - err.message = body.error.message; - err.code = body.error.code; + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); } - else if (res && res.status >= 400) { - // Consider all 4xx and 5xx responses errors. - err.message = body; - err.status = res.status; - } - return err; - } -} -exports.DefaultTransporter = DefaultTransporter; -/** - * Default user agent. - */ -DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + } + error('Bad object'); + }; + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. -/***/ }), + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; -/***/ 8905: -/***/ (function(__unused_webpack_module, exports) { + // Return the json_parse function. It will have access to all of the above + // functions and variables. -"use strict"; + return function (source, reviver) { + var result; -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); -}; -var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.LRUCache = exports.originalOrCamelOptions = exports.snakeToCamel = void 0; -/** - * Returns the camel case of a provided string. - * - * @remarks - * - * Match any `_` and not `_` pair, then return the uppercase of the not `_` - * character. - * - * @internal - * - * @param str the string to convert - * @returns the camelCase'd string - */ -function snakeToCamel(str) { - return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); -} -exports.snakeToCamel = snakeToCamel; -/** - * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference - * for original, non-camelCase key. - * - * @param obj object to lookup a value in - * @returns a `get` function for getting `obj[key || snakeKey]`, if available - */ -function originalOrCamelOptions(obj) { - /** - * - * @param key an index of object, preferably snake_case - * @returns the value `obj[key || snakeKey]`, if available - */ - function get(key) { - var _a; - const o = (obj || {}); - return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; - } - return { get }; -} -exports.originalOrCamelOptions = originalOrCamelOptions; -/** - * A simple LRU cache utility. - * Not meant for external usage. - * - * @experimental - * @internal - */ -class LRUCache { - constructor(options) { - _LRUCache_instances.add(this); - /** - * Maps are in order. Thus, the older item is the first item. - * - * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} - */ - _LRUCache_cache.set(this, new Map()); - this.capacity = options.capacity; - this.maxAge = options.maxAge; - } - /** - * Add an item to the cache. - * - * @param key the key to upsert - * @param value the value of the key - */ - set(key, value) { - __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); - __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); - } - /** - * Get an item from the cache. - * - * @param key the key to retrieve - */ - get(key) { - const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); - if (!item) - return; - __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); - __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); - return item.value; - } -} -exports.LRUCache = LRUCache; -_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { - __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); - __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { - value, - lastAccessed: Date.now(), - }); -}, _LRUCache_evict = function _LRUCache_evict() { - const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; - /** - * Because we know Maps are in order, this item is both the - * last item in the list (capacity) and oldest (maxAge). 
- */ - let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); - while (!oldestItem.done && - (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many - oldestItem.value[1].lastAccessed < cutoffDate) // too old - ) { - __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); - oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; }; +module.exports = json_parse; + /***/ }), -/***/ 6031: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 78574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var BigNumber = __nccwpck_require__(87558); -/** - * Copyright 2018 Google LLC - * - * Distributed under MIT license. - * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT - */ -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.GoogleToken = void 0; -const fs = __nccwpck_require__(7147); -const gaxios_1 = __nccwpck_require__(9555); -const jws = __nccwpck_require__(4636); -const path = __nccwpck_require__(1017); -const util_1 = __nccwpck_require__(3837); -const readFile = fs.readFile - ? (0, util_1.promisify)(fs.readFile) - : async () => { - // if running in the web-browser, fs.readFile may not have been shimmed. 
- throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); - }; -const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; -const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; -class ErrorWithCode extends Error { - constructor(message, code) { - super(message); - this.code = code; - } -} -class GoogleToken { - get accessToken() { - return this.rawToken ? this.rawToken.access_token : undefined; - } - get idToken() { - return this.rawToken ? this.rawToken.id_token : undefined; - } - get tokenType() { - return this.rawToken ? this.rawToken.token_type : undefined; - } - get refreshToken() { - return this.rawToken ? this.rawToken.refresh_token : undefined; - } - /** - * Create a GoogleToken. - * - * @param options Configuration object. - */ - constructor(options) { - _GoogleToken_instances.add(this); - this.transporter = { - request: opts => (0, gaxios_1.request)(opts), - }; - _GoogleToken_inFlightRequest.set(this, void 0); - __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); - } - /** - * Returns whether the token has expired. - * - * @return true if the token has expired, false otherwise. - */ - hasExpired() { - const now = new Date().getTime(); - if (this.rawToken && this.expiresAt) { - return now >= this.expiresAt; - } - else { - return true; - } - } - /** - * Returns whether the token will expire within eagerRefreshThresholdMillis - * - * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. - */ - isTokenExpiring() { - var _a; - const now = new Date().getTime(); - const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; - if (this.rawToken && this.expiresAt) { - return this.expiresAt <= now + eagerRefreshThresholdMillis; - } - else { - return true; - } - } - getToken(callback, opts = {}) { - if (typeof callback === 'object') { - opts = callback; - callback = undefined; - } - opts = Object.assign({ - forceRefresh: false, - }, opts); - if (callback) { - const cb = callback; - __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); - return; - } - return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); - } - /** - * Given a keyFile, extract the key and client email if available - * @param keyFile Path to a json, pem, or p12 file that contains the key. - * @returns an object with privateKey and clientEmail properties - */ - async getCredentials(keyFile) { - const ext = path.extname(keyFile); - switch (ext) { - case '.json': { - const key = await readFile(keyFile, 'utf8'); - const body = JSON.parse(key); - const privateKey = body.private_key; - const clientEmail = body.client_email; - if (!privateKey || !clientEmail) { - throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); - } - return { privateKey, clientEmail }; - } - case '.der': - case '.crt': - case '.pem': { - const privateKey = await readFile(keyFile, 'utf8'); - return { privateKey }; - } - case '.p12': - case '.pfx': { - throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + - 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); - } - default: - throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + - 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); - } - } - revokeToken(callback) { - if (callback) { - __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); - return; - } - return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); - } -} -exports.GoogleToken = GoogleToken; -_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { - if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { - return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); - } - try { - return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); - } - finally { - __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); - } -}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { - if (this.isTokenExpiring() === false && opts.forceRefresh === false) { - return Promise.resolve(this.rawToken); - } - if (!this.key && !this.keyFile) { - throw new Error('No key or keyFile set.'); - } - if (!this.key && this.keyFile) { - const creds = await this.getCredentials(this.keyFile); - this.key = creds.privateKey; - this.iss = creds.clientEmail || this.iss; - if (!creds.clientEmail) { - __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); - } - } - return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); -}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { - if (!this.iss) { - throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); - } -}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { - if (!this.accessToken) { - throw new Error('No token to revoke.'); - } - const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; - await this.transporter.request({ - url, - retry: true, - }); - __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { - email: this.iss, - sub: this.sub, - key: this.key, - keyFile: this.keyFile, - scope: this.scope, - additionalClaims: this.additionalClaims, - }); -}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { - this.keyFile = options.keyFile; - this.key = options.key; - this.rawToken = undefined; - this.iss = options.email || options.iss; - this.sub = options.sub; - this.additionalClaims = options.additionalClaims; - if (typeof options.scope === 'object') { - this.scope = options.scope.join(' '); - } - else { - this.scope = options.scope; - } - this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; - if (options.transporter) { - this.transporter = options.transporter; - } -}, _GoogleToken_requestToken = -/** - * Request the token from Google. 
- */ -async function _GoogleToken_requestToken() { - var _a, _b; - const iat = Math.floor(new Date().getTime() / 1000); - const additionalClaims = this.additionalClaims || {}; - const payload = Object.assign({ - iss: this.iss, - scope: this.scope, - aud: GOOGLE_TOKEN_URL, - exp: iat + 3600, - iat, - sub: this.sub, - }, additionalClaims); - const signedJWT = jws.sign({ - header: { alg: 'RS256' }, - payload, - secret: this.key, - }); - try { - const r = await this.transporter.request({ - method: 'POST', - url: GOOGLE_TOKEN_URL, - data: { - grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', - assertion: signedJWT, - }, - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - responseType: 'json', - retryConfig: { - httpMethodsToRetry: ['POST'], - }, - }); - this.rawToken = r.data; - this.expiresAt = - r.data.expires_in === null || r.data.expires_in === undefined - ? undefined - : (iat + r.data.expires_in) * 1000; - return this.rawToken; - } - catch (e) { - this.rawToken = undefined; - this.tokenExpires = undefined; - const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) - ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data - : {}; - if (body.error) { - const desc = body.error_description - ? `: ${body.error_description}` - : ''; - e.message = `${body.error}${desc}`; - } - throw e; - } -}; -//# sourceMappingURL=index.js.map +/* + json2.js + 2013-05-26 + + Public Domain. -/***/ }), + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. -/***/ 1621: -/***/ ((module) => { + See http://www.JSON.org/js.html -"use strict"; + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html -module.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf('--'); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); -}; + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. -/***/ }), + This file creates a global JSON object containing two methods: stringify + and parse. -/***/ 2589: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. 
-"use strict"; -var __assign=this&&this.__assign||function(){__assign=Object.assign||function(t){for(var s,i=1,n=arguments.length;i'"&]/g,nonAscii:/[<>'"&\u0080-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintable:/[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,nonAsciiPrintableOnly:/[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g,extensive:/[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/g};var defaultEncodeOptions={mode:"specialChars",level:"all",numeric:"decimal"};function encode(text,_a){var _b=_a===void 0?defaultEncodeOptions:_a,_c=_b.mode,mode=_c===void 0?"specialChars":_c,_d=_b.numeric,numeric=_d===void 0?"decimal":_d,_e=_b.level,level=_e===void 0?"all":_e;if(!text){return""}var encodeRegExp=encodeRegExps[mode];var references=allNamedReferences[level].characters;var isHex=numeric==="hexadecimal";return replaceUsingRegExp(text,encodeRegExp,(function(input){var result=references[input];if(!result){var code=input.length>1?surrogate_pairs_1.getCodePoint(input,0):input.charCodeAt(0);result=(isHex?"&#x"+code.toString(16):"&#"+code)+";"}return result}))}exports.encode=encode;var defaultDecodeOptions={scope:"body",level:"all"};var strict=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g;var attribute=/&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g;var baseDecodeRegExps={xml:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.xml},html4:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html4},html5:{strict:strict,attribute:attribute,body:named_references_1.bodyRegExps.html5}};var decodeRegExps=__assign(__assign({},baseDecodeRegExps),{all:baseDecodeRegExps.html5});var fromCharCode=String.fromCharCode;var outOfBoundsChar=fromCharCode(65533);var defaultDecodeEntityOptions={level:"all"};function getDecodedEntity(entity,references,isAttribute,isStrict){var decodeResult=entity;var decodeEntityLastChar=entity[entity.length-1];if(isAttribute&&decodeEntityLastChar==="="){decodeResult=entity}else if(isStrict&&decodeEntityLastChar!==";"){decodeResult=entity}else{var decodeResultByReference=references[entity];if(decodeResultByReference){decodeResult=decodeResultByReference}else if(entity[0]==="&"&&entity[1]==="#"){var decodeSecondChar=entity[2];var decodeCode=decodeSecondChar=="x"||decodeSecondChar=="X"?parseInt(entity.substr(3),16):parseInt(entity.substr(2));decodeResult=decodeCode>=1114111?outOfBoundsChar:decodeCode>65535?surrogate_pairs_1.fromCodePoint(decodeCode):fromCharCode(numeric_unicode_map_1.numericUnicodeMap[decodeCode]||decodeCode)}}return decodeResult}function decodeEntity(entity,_a){var _b=(_a===void 0?defaultDecodeEntityOptions:_a).level,level=_b===void 0?"all":_b;if(!entity){return""}return getDecodedEntity(entity,allNamedReferences[level].entities,false,false)}exports.decodeEntity=decodeEntity;function decode(text,_a){var _b=_a===void 0?defaultDecodeOptions:_a,_c=_b.level,level=_c===void 0?"all":_c,_d=_b.scope,scope=_d===void 0?level==="xml"?"strict":"body":_d;if(!text){return""}var decodeRegExp=decodeRegExps[level][scope];var references=allNamedReferences[level].entities;var 
isAttribute=scope==="attribute";var isStrict=scope==="strict";return replaceUsingRegExp(text,decodeRegExp,(function(entity){return getDecodedEntity(entity,references,isAttribute,isStrict)}))}exports.decode=decode;
-//# sourceMappingURL=./index.js.map

+ replacer an optional parameter that determines how object
+ values are stringified for objects. It can be a
+ function or an array of strings.

-/***/ }),

+ space an optional parameter that specifies the indentation
+ of nested structures. If it is omitted, the text will
+ be packed without extra whitespace. If it is a number,
+ it will specify the number of spaces to indent at each
+ level. If it is a string (such as '\t' or '&nbsp;'),
+ it contains the characters used to indent at each level.

-/***/ 6068:
-/***/ ((__unused_webpack_module, exports) => {

+ This method produces a JSON text from a JavaScript value.

-"use strict";
-Object.defineProperty(exports, "__esModule", ({value:true}));
[The remainder of this removed module is the bundled html-entities named-reference data (exports.bodyRegExps and exports.namedReferences: the xml, html4 and html5 entity/character maps). The entity-name keys in this copy were mangled when the diff text was extracted, so the tables are condensed to this placeholder.]
","𝔮":"𝔮","𝕢":"𝕢","⁗":"⁗","𝓆":"𝓆","⨖":"⨖","?":"?","⤜":"⤜","⥤":"⥤","∽̱":"∽̱","ŕ":"ŕ","⦳":"⦳","⦒":"⦒","⦥":"⦥","»":"»","⥵":"⥵","⤠":"⤠","⤳":"⤳","⤞":"⤞","⥅":"⥅","⥴":"⥴","↣":"↣","↝":"↝","⤚":"⤚","∶":"∶","❳":"❳","}":"}","]":"]","⦌":"⦌","⦎":"⦎","⦐":"⦐","ř":"ř","ŗ":"ŗ","р":"р","⤷":"⤷","⥩":"⥩","↳":"↳","▭":"▭","⥽":"⥽","𝔯":"𝔯","⥬":"⥬","ρ":"ρ","ϱ":"ϱ","⇉":"⇉","⋌":"⋌","˚":"˚","‏":"‏","⎱":"⎱","⫮":"⫮","⟭":"⟭","⇾":"⇾","⦆":"⦆","𝕣":"𝕣","⨮":"⨮","⨵":"⨵",")":")","⦔":"⦔","⨒":"⨒","›":"›","𝓇":"𝓇","⋊":"⋊","▹":"▹","⧎":"⧎","⥨":"⥨","℞":"℞","ś":"ś","⪴":"⪴","⪸":"⪸","š":"š","ş":"ş","ŝ":"ŝ","⪶":"⪶","⪺":"⪺","⋩":"⋩","⨓":"⨓","с":"с","⋅":"⋅","⩦":"⩦","⇘":"⇘","§":"§",";":";","⤩":"⤩","✶":"✶","𝔰":"𝔰","♯":"♯","щ":"щ","ш":"ш","­":"­","σ":"σ","ς":"ς","⩪":"⩪","⪞":"⪞","⪠":"⪠","⪝":"⪝","⪟":"⪟","≆":"≆","⨤":"⨤","⥲":"⥲","⨳":"⨳","⧤":"⧤","⌣":"⌣","⪪":"⪪","⪬":"⪬","⪬︀":"⪬︀","ь":"ь","/":"/","⧄":"⧄","⌿":"⌿","𝕤":"𝕤","♠":"♠","⊓︀":"⊓︀","⊔︀":"⊔︀","𝓈":"𝓈","☆":"☆","⊂":"⊂","⫅":"⫅","⪽":"⪽","⫃":"⫃","⫁":"⫁","⫋":"⫋","⊊":"⊊","⪿":"⪿","⥹":"⥹","⫇":"⫇","⫕":"⫕","⫓":"⫓","♪":"♪","¹":"¹","²":"²","³":"³","⫆":"⫆","⪾":"⪾","⫘":"⫘","⫄":"⫄","⟉":"⟉","⫗":"⫗","⥻":"⥻","⫂":"⫂","⫌":"⫌","⊋":"⊋","⫀":"⫀","⫈":"⫈","⫔":"⫔","⫖":"⫖","⇙":"⇙","⤪":"⤪","ß":"ß","⌖":"⌖","τ":"τ","ť":"ť","ţ":"ţ","т":"т","⌕":"⌕","𝔱":"𝔱","θ":"θ","ϑ":"ϑ","þ":"þ","×":"×","⨱":"⨱","⨰":"⨰","⌶":"⌶","⫱":"⫱","𝕥":"𝕥","⫚":"⫚","‴":"‴","▵":"▵","≜":"≜","◬":"◬","⨺":"⨺","⨹":"⨹","⧍":"⧍","⨻":"⨻","⏢":"⏢","𝓉":"𝓉","ц":"ц","ћ":"ћ","ŧ":"ŧ","⥣":"⥣","ú":"ú","ў":"ў","ŭ":"ŭ","û":"û","у":"у","ű":"ű","⥾":"⥾","𝔲":"𝔲","ù":"ù","▀":"▀","⌜":"⌜","⌏":"⌏","◸":"◸","ū":"ū","ų":"ų","𝕦":"𝕦","υ":"υ","⇈":"⇈","⌝":"⌝","⌎":"⌎","ů":"ů","◹":"◹","𝓊":"𝓊","⋰":"⋰","ũ":"ũ","ü":"ü","⦧":"⦧","⫨":"⫨","⫩":"⫩","⦜":"⦜","⊊︀":"⊊︀","⫋︀":"⫋︀","⊋︀":"⊋︀","⫌︀":"⫌︀","в":"в","⊻":"⊻","≚":"≚","⋮":"⋮","𝔳":"𝔳","𝕧":"𝕧","𝓋":"𝓋","⦚":"⦚","ŵ":"ŵ","⩟":"⩟","≙":"≙","℘":"℘","𝔴":"𝔴","𝕨":"𝕨","𝓌":"𝓌","𝔵":"𝔵","ξ":"ξ","⋻":"⋻","𝕩":"𝕩","𝓍":"𝓍","ý":"ý","я":"я","ŷ":"ŷ","ы":"ы","¥":"¥","𝔶":"𝔶","ї":"ї","𝕪":"𝕪","𝓎":"𝓎","ю":"ю","ÿ":"ÿ","ź":"ź","ž":"ž","з":"з","ż":"ż","ζ":"ζ","𝔷":"𝔷","ж":"ж","⇝":"⇝","𝕫":"𝕫","𝓏":"𝓏","‍":"‍","‌":"‌"}}}; -//# sourceMappingURL=./named-references.js.map + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value -/***/ }), + For example, this would serialize Dates as ISO strings. -/***/ 5439: -/***/ ((__unused_webpack_module, exports) => { + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } -"use strict"; -Object.defineProperty(exports, "__esModule", ({value:true}));exports.numericUnicodeMap={0:65533,128:8364,130:8218,131:402,132:8222,133:8230,134:8224,135:8225,136:710,137:8240,138:352,139:8249,140:338,142:381,145:8216,146:8217,147:8220,148:8221,149:8226,150:8211,151:8212,152:732,153:8482,154:353,155:8250,156:339,158:382,159:376}; -//# sourceMappingURL=./numeric-unicode-map.js.map + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; -/***/ }), + You can provide an optional replacer method. 
It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. -/***/ 1454: -/***/ ((__unused_webpack_module, exports) => { + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. -"use strict"; -Object.defineProperty(exports, "__esModule", ({value:true}));exports.fromCodePoint=String.fromCodePoint||function(astralCodePoint){return String.fromCharCode(Math.floor((astralCodePoint-65536)/1024)+55296,(astralCodePoint-65536)%1024+56320)};exports.getCodePoint=String.prototype.codePointAt?function(input,position){return input.codePointAt(position)}:function(input,position){return(input.charCodeAt(position)-55296)*1024+input.charCodeAt(position+1)-56320+65536};exports.highSurrogateFrom=55296;exports.highSurrogateTo=56319; -//# sourceMappingURL=./surrogate-pairs.js.map + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. -/***/ }), + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. -/***/ 7492: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. -"use strict"; + Example: -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const once_1 = __importDefault(__nccwpck_require__(1040)); -const agent_base_1 = __nccwpck_require__(7635); -const debug = (0, debug_1.default)('http-proxy-agent'); -function isHTTPS(protocol) { - return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; -} -/** - * The `HttpProxyAgent` implements an HTTP Agent subclass that connects - * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
- * - * @api public - */ -class HttpProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('Creating new HttpProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - const parsed = url_1.default.parse(req.path); - if (!parsed.protocol) { - parsed.protocol = 'http:'; - } - if (!parsed.hostname) { - parsed.hostname = opts.hostname || opts.host || null; - } - if (parsed.port == null && typeof opts.port) { - parsed.port = String(opts.port); - } - if (parsed.port === '80') { - // if port is 80, then we can remove the port so that the - // ":80" portion is not on the produced URL - parsed.port = ''; - } - // Change the `http.ClientRequest` instance's "path" field - // to the absolute path of the URL that will be requested. - req.path = url_1.default.format(parsed); - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); - } - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - // At this point, the http ClientRequest's internal `_header` field - // might have already been set. If this is the case then we'll need - // to re-generate the string since we just changed the `req.path`. 
- if (req._header) { - let first; - let endOfHeaders; - debug('Regenerating stored HTTP header string for request'); - req._header = null; - req._implicitHeader(); - if (req.output && req.output.length > 0) { - // Node < 12 - debug('Patching connection write() output buffer with updated header'); - first = req.output[0]; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.output[0] = req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.output); - } - else if (req.outputData && req.outputData.length > 0) { - // Node >= 12 - debug('Patching connection write() output buffer with updated header'); - first = req.outputData[0].data; - endOfHeaders = first.indexOf('\r\n\r\n') + 4; - req.outputData[0].data = - req._header + first.substring(endOfHeaders); - debug('Output buffer: %o', req.outputData[0].data); - } - } - // Wait for the socket's `connect` event, so that this `callback()` - // function throws instead of the `http` request machinery. This is - // important for i.e. `PacProxyAgent` which determines a failed proxy - // connection via the `callback()` function throwing. - yield (0, once_1.default)(socket, 'connect'); - return socket; - }); - } -} -exports["default"] = HttpProxyAgent; -//# sourceMappingURL=agent.js.map + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' -/***/ }), -/***/ 3764: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' -"use strict"; + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(7492)); -function createHttpProxyAgent(opts) { - return new agent_1.default(opts); -} -(function (createHttpProxyAgent) { - createHttpProxyAgent.HttpProxyAgent = agent_1.default; - createHttpProxyAgent.prototype = agent_1.default.prototype; -})(createHttpProxyAgent || (createHttpProxyAgent = {})); -module.exports = createHttpProxyAgent; -//# sourceMappingURL=index.js.map -/***/ }), + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. -/***/ 7635: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. -"use strict"; + Example: -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -const events_1 = __nccwpck_require__(2361); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const promisify_1 = __importDefault(__nccwpck_require__(9965)); -const debug = debug_1.default('agent-base'); -function isAgent(v) { - return Boolean(v) && typeof v.addRequest === 'function'; -} -function isSecureEndpoint() { - const { stack } = new Error(); - if (typeof stack !== 'string') - return false; - return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); -} -function createAgent(callback, opts) { - return new createAgent.Agent(callback, opts); -} -(function (createAgent) { - /** - * Base `http.Agent` implementation. - * No pooling/keep-alive is implemented by default. - * - * @param {Function} callback - * @api public - */ - class Agent extends events_1.EventEmitter { - constructor(callback, _opts) { - super(); - let opts = _opts; - if (typeof callback === 'function') { - this.callback = callback; - } - else if (callback) { - opts = callback; - } - // Timeout for the socket to be returned from the callback - this.timeout = null; - if (opts && typeof opts.timeout === 'number') { - this.timeout = opts.timeout; - } - // These aren't actually used by `agent-base`, but are required - // for the TypeScript definition files in `@types/node` :/ - this.maxFreeSockets = 1; - this.maxSockets = 1; - this.maxTotalSockets = Infinity; - this.sockets = {}; - this.freeSockets = {}; - this.requests = {}; - this.options = {}; - } - get defaultPort() { - if (typeof this.explicitDefaultPort === 'number') { - return this.explicitDefaultPort; - } - return isSecureEndpoint() ? 443 : 80; - } - set defaultPort(v) { - this.explicitDefaultPort = v; - } - get protocol() { - if (typeof this.explicitProtocol === 'string') { - return this.explicitProtocol; - } - return isSecureEndpoint() ? 'https:' : 'http:'; - } - set protocol(v) { - this.explicitProtocol = v; - } - callback(req, opts, fn) { - throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); - } - /** - * Called by node-core's "_http_client.js" module when creating - * a new HTTP request with this Agent instance. - * - * @api public - */ - addRequest(req, _opts) { - const opts = Object.assign({}, _opts); - if (typeof opts.secureEndpoint !== 'boolean') { - opts.secureEndpoint = isSecureEndpoint(); - } - if (opts.host == null) { - opts.host = 'localhost'; - } - if (opts.port == null) { - opts.port = opts.secureEndpoint ? 443 : 80; - } - if (opts.protocol == null) { - opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; - } - if (opts.host && opts.path) { - // If both a `host` and `path` are specified then it's most - // likely the result of a `url.parse()` call... we need to - // remove the `path` portion so that `net.connect()` doesn't - // attempt to open that as a unix socket file. - delete opts.path; - } - delete opts.agent; - delete opts.hostname; - delete opts._defaultAgent; - delete opts.defaultPort; - delete opts.createConnection; - // Hint to use "Connection: close" - // XXX: non-documented `http` module API :( - req._last = true; - req.shouldKeepAlive = false; - let timedOut = false; - let timeoutId = null; - const timeoutMs = opts.timeout || this.timeout; - const onerror = (err) => { - if (req._hadError) - return; - req.emit('error', err); - // For Safety. Some additional errors might fire later on - // and we need to make sure we don't double-fire the error event. 
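As a sketch of the callback contract implemented by this agent-base build: the subclass (or the functional form shown here) returns the Duplex stream that should carry each request.

    var net = require('net');
    var createAgent = require('agent-base'); // the module removed in this hunk
    // The callback receives (req, opts) and returns a socket, or a Promise of one.
    var agent = createAgent(function (req, opts) {
      return net.connect({ host: opts.host, port: opts.port });
    });
    // `agent` can then be passed as the `agent` option of http/https requests.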
- req._hadError = true; - }; - const ontimeout = () => { - timeoutId = null; - timedOut = true; - const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); - err.code = 'ETIMEOUT'; - onerror(err); - }; - const callbackError = (err) => { - if (timedOut) - return; - if (timeoutId !== null) { - clearTimeout(timeoutId); - timeoutId = null; - } - onerror(err); - }; - const onsocket = (socket) => { - if (timedOut) - return; - if (timeoutId != null) { - clearTimeout(timeoutId); - timeoutId = null; - } - if (isAgent(socket)) { - // `socket` is actually an `http.Agent` instance, so - // relinquish responsibility for this `req` to the Agent - // from here on - debug('Callback returned another Agent instance %o', socket.constructor.name); - socket.addRequest(req, opts); - return; - } - if (socket) { - socket.once('free', () => { - this.freeSocket(socket, opts); - }); - req.onSocket(socket); - return; - } - const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); - onerror(err); - }; - if (typeof this.callback !== 'function') { - onerror(new Error('`callback` is not defined')); - return; - } - if (!this.promisifiedCallback) { - if (this.callback.length >= 3) { - debug('Converting legacy callback function to promise'); - this.promisifiedCallback = promisify_1.default(this.callback); + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } } - else { - this.promisifiedCallback = this.callback; + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } } - } - if (typeof timeoutMs === 'number' && timeoutMs > 0) { - timeoutId = setTimeout(ontimeout, timeoutMs); - } - if ('port' in opts && typeof opts.port !== 'number') { - opts.port = Number(opts.port); - } - try { - debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); - Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); - } - catch (err) { - Promise.reject(err).catch(callbackError); - } - } - freeSocket(socket, opts) { - debug('Freeing socket %o %o', socket.constructor.name, opts); - socket.destroy(); - } - destroy() { - debug('Destroying agent %o', this.constructor.name); - } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? 
'0' + n : n; } - createAgent.Agent = Agent; - // So that `instanceof` works correctly - createAgent.prototype = createAgent.Agent.prototype; -})(createAgent || (createAgent = {})); -module.exports = createAgent; -//# sourceMappingURL=index.js.map -/***/ }), + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; -/***/ 9965: -/***/ ((__unused_webpack_module, exports) => { -"use strict"; + function quote(string) { -Object.defineProperty(exports, "__esModule", ({ value: true })); -function promisify(fn) { - return function (req, opts) { - return new Promise((resolve, reject) => { - fn.call(this, req, opts, (err, rtn) => { - if (err) { - reject(err); - } - else { - resolve(rtn); - } - }); - }); - }; -} -exports["default"] = promisify; -//# sourceMappingURL=promisify.js.map +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. -/***/ }), + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } -/***/ 7219: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -"use strict"; + function str(key, holder) { -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpsProxyAgent = void 0; -const net = __importStar(__nccwpck_require__(1808)); -const tls = __importStar(__nccwpck_require__(4404)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const agent_base_1 = __nccwpck_require__(694); -const url_1 = __nccwpck_require__(7310); -const parse_proxy_response_1 = __nccwpck_require__(595); -const debug = (0, debug_1.default)('https-proxy-agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(proxy, opts) { - super(opts); - this.options = { path: undefined }; - this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; - this.proxyHeaders = opts?.headers ?? {}; - debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); - // Trim off the brackets from IPv6 addresses - const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); - const port = this.proxy.port - ? parseInt(this.proxy.port, 10) - : this.proxy.protocol === 'https:' - ? 443 - : 80; - this.connectOpts = { - // Attempt to negotiate http/1.1 for proxy servers that support http/2 - ALPNProtocols: ['http/1.1'], - ...(opts ? omit(opts, 'headers') : null), - host, - port, - }; - } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - */ - async connect(req, opts) { - const { proxy } = this; - if (!opts.host) { - throw new TypeError('No "host" provided'); - } - // Create a socket connection to the proxy server. - let socket; - if (proxy.protocol === 'https:') { - debug('Creating `tls.Socket`: %o', this.connectOpts); - const servername = this.connectOpts.servername || this.connectOpts.host; - socket = tls.connect({ - ...this.connectOpts, - servername: servername && net.isIP(servername) ? undefined : servername, - }); - } - else { - debug('Creating `net.Socket`: %o', this.connectOpts); - socket = net.connect(this.connectOpts); - } - const headers = typeof this.proxyHeaders === 'function' - ? this.proxyHeaders() - : { ...this.proxyHeaders }; - const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; - let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.username || proxy.password) { - const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; - headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; - } - headers.Host = `${host}:${opts.port}`; - if (!headers['Proxy-Connection']) { - headers['Proxy-Connection'] = this.keepAlive - ? 'Keep-Alive' - : 'close'; +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. 
+ length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); } - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); } - const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); - socket.write(`${payload}\r\n`); - const { connect, buffered } = await proxyResponsePromise; - req.emit('proxyConnect', connect); - this.emit('proxyConnect', connect, req); - if (connect.statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. - debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls.connect({ - ...omit(opts, 'host', 'path', 'port'), - socket, - servername: net.isIP(servername) ? undefined : servername, - }); + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. - // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('Replaying proxy buffer for failed request'); - (0, assert_1.default)(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; - } -} -HttpsProxyAgent.protocols = ['http', 'https']; -exports.HttpsProxyAgent = HttpsProxyAgent; -function resume(socket) { - socket.resume(); -} -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } - } - return ret; -} -//# sourceMappingURL=index.js.map -/***/ }), + case 'number': -/***/ 595: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +// JSON numbers must be finite. Encode non-finite numbers as null. -"use strict"; + return isFinite(value) ? String(value) : 'null'; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseProxyResponse = void 0; -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... - let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('readable', read); - } - function onend() { - cleanup(); - debug('onend'); - reject(new Error('Proxy connection ended before receiving CONNECT response')); - } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const headerParts = buffered - .slice(0, endOfHeaders) - .toString('ascii') - .split('\r\n'); - const firstLine = headerParts.shift(); - if (!firstLine) { - socket.destroy(); - return reject(new Error('No header received from proxy CONNECT response')); - } - const firstLineParts = firstLine.split(' '); - const statusCode = +firstLineParts[1]; - const statusText = firstLineParts.slice(2).join(' '); - const headers = {}; - for (const header of headerParts) { - if (!header) - continue; - const firstColon = header.indexOf(':'); - if (firstColon === -1) { - socket.destroy(); - return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); - } - const key = header.slice(0, firstColon).toLowerCase(); - const value = header.slice(firstColon + 1).trimStart(); - const current = headers[key]; - if (typeof current === 'string') { - headers[key] = [current, value]; - } - else if (Array.isArray(current)) { - current.push(value); - } - else { - headers[key] = value; - } + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; } - debug('got proxy server response: %o %o', firstLine, headers); - cleanup(); - resolve({ - connect: { - statusCode, - statusText, - headers, - }, - buffered, - }); - } - socket.on('error', onerror); - socket.on('end', onend); - read(); - }); -} -exports.parseProxyResponse = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map -/***/ }), +// Make an array to hold the partial results of stringifying this object value. 
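For context on the https-proxy-agent code above, a hedged sketch of its typical use: HTTPS traffic is tunnelled through an HTTP CONNECT request to the proxy (proxy URL illustrative):

    var https = require('https');
    var { HttpsProxyAgent } = require('https-proxy-agent');
    var agent = new HttpsProxyAgent('http://proxy.example.com:3128');
    // The agent issues CONNECT example.com:443 to the proxy, then upgrades the socket to TLS.
    https.get('https://example.com/', { agent: agent }, function (res) {
      console.log('status:', res.statusCode);
    });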
-/***/ 4124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + gap += indent; + partial = []; -try { - var util = __nccwpck_require__(3837); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __nccwpck_require__(8544); -} +// Is the value an array? + if (Object.prototype.toString.apply(value) === '[object Array]') { -/***/ }), +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. -/***/ 8544: -/***/ ((module) => { + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } - } -} +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } -/***/ }), +// If the replacer is an array, use it to select the members to be stringified. -/***/ 1554: -/***/ ((module) => { + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { -"use strict"; +// Otherwise, iterate through all of the keys in the object. + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; +// Join all of the member texts together, separated with commas, +// and wrap them in braces. -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; +// If the JSON object does not yet have a stringify method, give it one. -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. 
The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } -module.exports = isStream; +// If the space parameter is a string, it will be used as the indent string. + } else if (typeof space === 'string') { + indent = space; + } -/***/ }), +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. -/***/ 5031: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } -var json_stringify = (__nccwpck_require__(8574).stringify); -var json_parse = __nccwpck_require__(9099); +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. -module.exports = function(options) { - return { - parse: json_parse(options), - stringify: json_stringify + return str('', {'': value}); + }; } -}; -//create the default method members with no options applied for backwards compatibility -module.exports.parse = json_parse(); -module.exports.stringify = json_stringify; +}()); /***/ }), -/***/ 9099: +/***/ 96010: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var BigNumber = null; +var bufferEqual = __nccwpck_require__(9239); +var Buffer = (__nccwpck_require__(21867).Buffer); +var crypto = __nccwpck_require__(6113); +var formatEcdsa = __nccwpck_require__(11728); +var util = __nccwpck_require__(73837); -// regexpxs extracted from -// (c) BSD-3-Clause -// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; -const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; -const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} -/* - json_parse.js - 2012-06-20 +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } - Public Domain. + if (typeof key === 'string') { + return; + } - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } - This file creates a json_parse function. 
- During create you can (optionally) specify some behavioural switches + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } - require('json-bigint')(options) + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } - The optional options parameter holds switches that drive certain - aspects of the parsing process: - * options.strict = true will warn about duplicate-key usage in the json. - The default (strict = false) will silently ignore those and overwrite - values for keys that are in duplicate use. + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } - The resulting function follows this signature: - json_parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } - Example: + if (typeof key === 'string') { + return; + } - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. + if (typeof key === 'object') { + return; + } - myData = json_parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); + throw typeError(MSG_INVALID_SIGNER_KEY); +}; - This is a reference implementation. You are free to copy, modify, or - redistribute. +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html + if (typeof key === 'string') { + return key; + } - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. -*/ + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } -/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, - hasOwnProperty, message, n, name, prototype, push, r, t, text -*/ + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } -var json_parse = function (options) { - 'use strict'; + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } - // This is a function that can parse a JSON text, producing a JavaScript - // data structure. It is a simple, recursive descent parser. It does not use - // eval or regular expressions, so it can be used as a model for implementing - // a JSON parser in other languages. + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} - // We are defining the function inside of another function to avoid creating - // global variables. 
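The behavioural switches described above can be exercised like this, sketched against the json-bigint API that this bundle vendors:

    var JSONbig = require('json-bigint')({ useNativeBigInt: true });
    var parsed = JSONbig.parse('{"id": 9223372036854775807}');
    // parsed.id is a native BigInt, so integers longer than 15 digits lose no precision.
    JSONbig.stringify(parsed); // => '{"id":9223372036854775807}'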
+function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} - // Default options one can override by passing options to the parse() - var _options = { - strict: false, // not being strict means do not generate syntax errors for "duplicate key" - storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string - alwaysParseAsBig: false, // toggles whether all numbers should be Big - useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js - protoAction: 'error', - constructorAction: 'error', - }; +function toBase64(base64url) { + base64url = base64url.toString(); - // If there are options, then use them to override the default _options - if (options !== undefined && options !== null) { - if (options.strict === true) { - _options.strict = true; - } - if (options.storeAsString === true) { - _options.storeAsString = true; + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; } - _options.alwaysParseAsBig = - options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; - _options.useNativeBigInt = - options.useNativeBigInt === true ? options.useNativeBigInt : false; + } - if (typeof options.constructorAction !== 'undefined') { - if ( - options.constructorAction === 'error' || - options.constructorAction === 'ignore' || - options.constructorAction === 'preserve' - ) { - _options.constructorAction = options.constructorAction; - } else { - throw new Error( - `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` - ); - } - } + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} - if (typeof options.protoAction !== 'undefined') { - if ( - options.protoAction === 'error' || - options.protoAction === 'ignore' || - options.protoAction === 'preserve' - ) { - _options.protoAction = options.protoAction; - } else { - throw new Error( - `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` - ); - } - } +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); } +} - var at, // The index of the current character - ch, // The current character - escapee = { - '"': '"', - '\\': '\\', - '/': '/', - b: '\b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', - }, - text, - error = function (m) { - // Call error when something is wrong. +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} - throw { - name: 'SyntaxError', - message: m, - at: at, - text: text, - }; - }, - next = function (c) { - // If a c parameter is provided, verify that it matches the current character. 
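The HMAC signer added above boils down to Node's crypto HMAC plus the base64url translation performed by fromBase64, roughly:

    var crypto = require('crypto');
    // Equivalent of createHmacSigner('256')('some input', 'a-shared-secret'):
    var sig = crypto.createHmac('sha256', 'a-shared-secret')
      .update('some input')
      .digest('base64')
      .replace(/=/g, '')    // strip padding
      .replace(/\+/g, '-')  // '+' -> '-'
      .replace(/\//g, '_'); // '/' -> '_'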
+function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} - if (c && c !== ch) { - error("Expected '" + c + "' instead of '" + ch + "'"); - } +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} - // Get the next character. When there are no more characters, - // return the empty string. +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} - ch = text.charAt(at); - at += 1; - return ch; - }, - number = function () { - // Parse a number value. +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} - var number, - string = ''; +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} - if (ch === '-') { - string = '-'; - next('-'); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); - } - if (ch === '.') { - string += '.'; - while (next() && ch >= '0' && ch <= '9') { - string += ch; - } - } - if (ch === 'e' || ch === 'E') { - string += ch; - next(); - if (ch === '-' || ch === '+') { - string += ch; - next(); - } - while (ch >= '0' && ch <= '9') { - string += ch; - next(); - } - } - number = +string; - if (!isFinite(number)) { - error('Bad number'); - } else { - if (BigNumber == null) BigNumber = __nccwpck_require__(7558); - //if (number > 9007199254740992 || number < -9007199254740992) - // Bignumber has stricter check: everything with length > 15 digits disallowed - if (string.length > 15) - return _options.storeAsString - ? string - : _options.useNativeBigInt - ? BigInt(string) - : new BigNumber(string); - else - return !_options.alwaysParseAsBig - ? number - : _options.useNativeBigInt - ? BigInt(number) - : new BigNumber(number); - } - }, - string = function () { - // Parse a string value. 
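A hedged sketch of the RSA path added above, using the jwa() factory that follows below and a throwaway key pair (key generation is shown only for illustration):

    var crypto = require('crypto');
    var jwa = require('jwa');
    var { publicKey, privateKey } = crypto.generateKeyPairSync('rsa', { modulusLength: 2048 });
    var rs256 = jwa('RS256');
    var signature = rs256.sign('payload-to-protect', privateKey); // KeyObjects are accepted
    rs256.verify('payload-to-protect', signature, publicKey);     // => true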
+function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} - var hex, - i, - string = '', - uffff; +function createNoneSigner() { + return function sign() { + return ''; + } +} - // When parsing for string values, we must look for " and \ characters. +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} - if (ch === '"') { - var startAt = at; - while (next()) { - if (ch === '"') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - return string; - } - if (ch === '\\') { - if (at - 1 > startAt) string += text.substring(startAt, at - 1); - next(); - if (ch === 'u') { - uffff = 0; - for (i = 0; i < 4; i += 1) { - hex = parseInt(next(), 16); - if (!isFinite(hex)) { - break; - } - uffff = uffff * 16 + hex; - } - string += String.fromCharCode(uffff); - } else if (typeof escapee[ch] === 'string') { - string += escapee[ch]; - } else { - break; - } - startAt = at; - } - } - } - error('Bad string'); - }, - white = function () { - // Skip whitespace. +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; - while (ch && ch <= ' ') { - next(); - } - }, - word = function () { - // true, false, or null. + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; - switch (ch) { - case 't': - next('t'); - next('r'); - next('u'); - next('e'); - return true; - case 'f': - next('f'); - next('a'); - next('l'); - next('s'); - next('e'); - return false; - case 'n': - next('n'); - next('u'); - next('l'); - next('l'); - return null; - } - error("Unexpected '" + ch + "'"); - }, - value, // Place holder for the value function. - array = function () { - // Parse an array value. - var array = []; +/***/ }), + +/***/ 4636: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(73334); +var VerifyStream = __nccwpck_require__(5522); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; - if (ch === '[') { - next('['); - white(); - if (ch === ']') { - next(']'); - return array; // empty array - } - while (ch) { - array.push(value()); - white(); - if (ch === ']') { - next(']'); - return array; - } - next(','); - white(); - } - } - error('Bad array'); - }, - object = function () { - // Parse an object value. 
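The jwa(algorithm) factory above dispatches on the algorithm prefix; a minimal HMAC round trip looks like:

    var jwa = require('jwa');
    var hs256 = jwa('HS256');                                 // 'hs' -> HMAC factories, bits = 256
    var sig = hs256.sign('some input', 'a-shared-secret');
    hs256.verify('some input', sig, 'a-shared-secret');       // => true
    hs256.verify('tampered input', sig, 'a-shared-secret');   // => false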
+exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; - var key, - object = Object.create(null); - if (ch === '{') { - next('{'); - white(); - if (ch === '}') { - next('}'); - return object; // empty object - } - while (ch) { - key = string(); - white(); - next(':'); - if ( - _options.strict === true && - Object.hasOwnProperty.call(object, key) - ) { - error('Duplicate key "' + key + '"'); - } +/***/ }), - if (suspectProtoRx.test(key) === true) { - if (_options.protoAction === 'error') { - error('Object contains forbidden prototype property'); - } else if (_options.protoAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else if (suspectConstructorRx.test(key) === true) { - if (_options.constructorAction === 'error') { - error('Object contains forbidden constructor property'); - } else if (_options.constructorAction === 'ignore') { - value(); - } else { - object[key] = value(); - } - } else { - object[key] = value(); - } +/***/ 61868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - white(); - if (ch === '}') { - next('}'); - return object; - } - next(','); - white(); - } - } - error('Bad object'); - }; +/*global module, process*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var Stream = __nccwpck_require__(12781); +var util = __nccwpck_require__(73837); - value = function () { - // Parse a JSON value. It could be an object, an array, a string, a number, - // or a word. +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; - white(); - switch (ch) { - case '{': - return object(); - case '[': - return array(); - case '"': - return string(); - case '-': - return number(); - default: - return ch >= '0' && ch <= '9' ? number() : word(); - } - }; + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } - // Return the json_parse function. It will have access to all of the above - // functions and variables. + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } - return function (source, reviver) { - var result; + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } - text = source + ''; - at = 0; - ch = ' '; - result = value(); - white(); - if (ch) { - error('Syntax error'); - } + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); - // If there is a reviver function, we recursively walk the new structure, - // passing each name/value pair to the reviver function for possible - // transformation, starting with a temporary root object that holds the result - // in an empty key. If there is not a reviver function, we simply return the - // result. +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; - return typeof reviver === 'function' - ? 
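The prototype-pollution guard in the parser above behaves roughly as follows, sketched against the json-bigint options:

    var JSONbig = require('json-bigint')({ protoAction: 'ignore' });
    JSONbig.parse('{"__proto__": {"polluted": true}, "ok": 1}');
    // => { ok: 1 }  -- the forbidden key is parsed but silently dropped
    // With the default protoAction of 'error', the same input throws a SyntaxError-style error.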
(function walk(holder, key) { - var k, - v, - value = holder[key]; - if (value && typeof value === 'object') { - Object.keys(value).forEach(function (k) { - v = walk(value, k); - if (v !== undefined) { - value[k] = v; - } else { - delete value[k]; - } - }); - } - return reviver.call(holder, key, value); - })({ '': result }, '') - : result; - }; +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; }; -module.exports = json_parse; +module.exports = DataStream; /***/ }), -/***/ 8574: +/***/ 73334: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var BigNumber = __nccwpck_require__(7558); +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = __nccwpck_require__(61868); +var jwa = __nccwpck_require__(96010); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(65292); +var util = __nccwpck_require__(73837); -/* - json2.js - 2013-05-26 +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} - Public Domain. +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} - See http://www.JSON.org/js.html +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); - This code should be minified before deployment. - See http://javascript.crockford.com/jsmin.html +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. +SignStream.sign = jwsSign; +module.exports = SignStream; - This file creates a global JSON object containing two methods: stringify - and parse. - JSON.stringify(value, replacer, space) - value any JavaScript value, usually an object or array. 
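Taken together with the VerifyStream counterpart added below, the jws module added here supports a compact sign/verify/decode round trip along these lines (values illustrative):

    var jws = require('jws');
    var token = jws.sign({
      header: { alg: 'HS256' },
      payload: { hello: 'world' },
      secret: 'a-shared-secret'
    });
    // token is three base64url segments: header.payload.signature
    jws.verify(token, 'HS256', 'a-shared-secret'); // => true
    jws.decode(token); // => { header: { alg: 'HS256' }, payload: '{"hello":"world"}', signature: '...' }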
+/***/ }), - replacer an optional parameter that determines how object - values are stringified for objects. It can be a - function or an array of strings. +/***/ 65292: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - space an optional parameter that specifies the indentation - of nested structures. If it is omitted, the text will - be packed without extra whitespace. If it is a number, - it will specify the number of spaces to indent at each - level. If it is a string (such as '\t' or ' '), - it contains the characters used to indent at each level. +/*global module*/ +var Buffer = (__nccwpck_require__(14300).Buffer); - This method produces a JSON text from a JavaScript value. +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; - When an object value is found, if the object contains a toJSON - method, its toJSON method will be called and the result will be - stringified. A toJSON method does not serialize: it returns the - value represented by the name/value pair that should be serialized, - or undefined if nothing should be serialized. The toJSON method - will be passed the key associated with the value, and this will be - bound to the value - For example, this would serialize Dates as ISO strings. +/***/ }), - Date.prototype.toJSON = function (key) { - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } +/***/ 5522: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this.getUTCFullYear() + '-' + - f(this.getUTCMonth() + 1) + '-' + - f(this.getUTCDate()) + 'T' + - f(this.getUTCHours()) + ':' + - f(this.getUTCMinutes()) + ':' + - f(this.getUTCSeconds()) + 'Z'; - }; +/*global module*/ +var Buffer = (__nccwpck_require__(21867).Buffer); +var DataStream = __nccwpck_require__(61868); +var jwa = __nccwpck_require__(96010); +var Stream = __nccwpck_require__(12781); +var toString = __nccwpck_require__(65292); +var util = __nccwpck_require__(73837); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; - You can provide an optional replacer method. It will be passed the - key and value of each member, with this bound to the containing - object. The value that is returned from your method will be - serialized. If your method returns undefined, then the member will - be excluded from the serialization. +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} - If the replacer parameter is an array of strings, then it will be - used to select the members to be serialized. It filters the results - such that only members with keys listed in the replacer array are - stringified. +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} - Values that do not have JSON representations, such as undefined or - functions, will not be serialized. Such values in objects will be - dropped; in arrays they will be replaced with null. You can use - a replacer function to replace those with JSON values. - JSON.stringify(undefined) returns undefined. 
+function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} - The optional space parameter produces a stringification of the - value that is filled with line breaks and indentation to make it - easier to read. +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} - If the space parameter is a non-empty string, then that string will - be used for indentation. If the space parameter is a number, then - the indentation will be that many spaces. +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} - Example: +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} - text = JSON.stringify(['e', {pluribus: 'unum'}]); - // text is '["e",{"pluribus":"unum"}]' +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} - text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); - // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); - text = JSON.stringify([new Date()], function (key, value) { - return this[key] instanceof Date ? - 'Date(' + this[key] + ')' : value; - }); - // text is '["Date(---current time---)"]' + if (!isValidJws(jwsSig)) + return null; + var header = headerFromJWS(jwsSig); - JSON.parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. + if (!header) + return null; - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); - Example: + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. 
+function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); - myData = JSON.parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; - myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { - var d; - if (typeof value === 'string' && - value.slice(0, 5) === 'Date(' && - value.slice(-1) === ')') { - d = new Date(value.slice(5, -1)); - if (d) { - return d; - } - } - return value; - }); +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; +module.exports = VerifyStream; - This is a reference implementation. You are free to copy, modify, or - redistribute. -*/ -/*jslint evil: true, regexp: true */ +/***/ }), -/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, - call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, - getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, - lastIndex, length, parse, prototype, push, replace, slice, stringify, - test, toJSON, toString, valueOf -*/ +/***/ 47426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __nccwpck_require__(53765) + + +/***/ }), + +/***/ 43583: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ +var db = __nccwpck_require__(47426) +var extname = (__nccwpck_require__(71017).extname) -// Create a JSON object only if one does not already exist. We create the -// methods in a closure to avoid creating global variables. +/** + * Module variables. + * @private + */ -var JSON = module.exports; +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i -(function () { - 'use strict'; +/** + * Module exports. + * @public + */ - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? 
'0' + n : n; - } +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) - var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - gap, - indent, - meta = { // table of character substitutions - '\b': '\\b', - '\t': '\\t', - '\n': '\\n', - '\f': '\\f', - '\r': '\\r', - '"' : '\\"', - '\\': '\\\\' - }, - rep; +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ - function quote(string) { +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } -// If the string contains no control characters, no quote characters, and no -// backslash characters, then we can safely slap some quotes around it. -// Otherwise we must also replace the offending characters with safe escape -// sequences. + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] - escapable.lastIndex = 0; - return escapable.test(string) ? '"' + string.replace(escapable, function (a) { - var c = meta[a]; - return typeof c === 'string' - ? c - : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - }) + '"' : '"' + string + '"'; - } + if (mime && mime.charset) { + return mime.charset + } + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } - function str(key, holder) { + return false +} -// Produce a string from holder[key]. +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ - var i, // The loop counter. - k, // The member key. - v, // The member value. - length, - mind = gap, - partial, - value = holder[key], - isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } -// If the value has a toJSON method, call it to obtain a replacement value. + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str - if (value && typeof value === 'object' && - typeof value.toJSON === 'function') { - value = value.toJSON(key); - } + if (!mime) { + return false + } -// If we were called with a replacer function, then call the replacer to -// obtain a replacement value. + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } - if (typeof rep === 'function') { - value = rep.call(holder, key, value); - } + return mime +} -// What happens next depends on the value's type. +/** + * Get the default extension for a MIME type. 
+ * + * @param {string} type + * @return {boolean|string} + */ - switch (typeof value) { - case 'string': - if (isBigNumber) { - return value; - } else { - return quote(value); - } +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } - case 'number': + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) -// JSON numbers must be finite. Encode non-finite numbers as null. + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] - return isFinite(value) ? String(value) : 'null'; + if (!exts || !exts.length) { + return false + } - case 'boolean': - case 'null': - case 'bigint': + return exts[0] +} -// If the value is a boolean or null, convert it to a string. Note: -// typeof null does not produce 'null'. The case is included here in -// the remote chance that this gets fixed someday. +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ - return String(value); +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } -// If the type is 'object', we might be dealing with an object or an array or -// null. + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) - case 'object': + if (!extension) { + return false + } -// Due to a specification blunder in ECMAScript, typeof null is 'object', -// so watch out for that case. + return exports.types[extension] || false +} - if (!value) { - return 'null'; - } +/** + * Populate the extensions and types maps. + * @private + */ -// Make an array to hold the partial results of stringifying this object value. +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] - gap += indent; - partial = []; + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions -// Is the value an array? + if (!exts || !exts.length) { + return + } - if (Object.prototype.toString.apply(value) === '[object Array]') { + // mime -> extensions + extensions[type] = exts -// The value is an array. Stringify every element. Use null as a placeholder -// for non-JSON values. + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] - length = value.length; - for (i = 0; i < length; i += 1) { - partial[i] = str(i, value) || 'null'; - } + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) -// Join all of the elements together, separated with commas, and wrap them in -// brackets. + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } - v = partial.length === 0 - ? '[]' - : gap - ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' - : '[' + partial.join(',') + ']'; - gap = mind; - return v; - } + // set the extension -> mime + types[extension] = type + } + }) +} -// If the replacer is an array, use it to select the members to be stringified. - if (rep && typeof rep === 'object') { - length = rep.length; - for (i = 0; i < length; i += 1) { - if (typeof rep[i] === 'string') { - k = rep[i]; - v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - } - } - } else { +/***/ }), -// Otherwise, iterate through all of the keys in the object. 
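Aside (not part of the generated bundle): the module added above is `mime-types`, keyed off the bundled `mime-db` snapshot. A short illustrative sketch of the API it exposes; the return values reflect that snapshot:

const mime = require('mime-types');

mime.lookup('file.txt');            // 'text/plain'  — lookup() keys off the extension
mime.lookup('archive.tar.gz');      // 'application/gzip'
mime.contentType('markdown');       // 'text/markdown; charset=utf-8'
mime.extension('application/json'); // 'json'  — first registered extension wins
mime.charset('text/html');          // 'UTF-8' — text/* defaults to UTF-8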
+/***/ 46038: +/***/ ((module) => { - Object.keys(value).forEach(function(k) { - var v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - }); - } +"use strict"; -// Join all of the member texts together, separated with commas, -// and wrap them in braces. - v = partial.length === 0 - ? '{}' - : gap - ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' - : '{' + partial.join(',') + '}'; - gap = mind; - return v; - } - } +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); -// If the JSON object does not yet have a stringify method, give it one. + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } - if (typeof JSON.stringify !== 'function') { - JSON.stringify = function (value, replacer, space) { + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} -// The stringify method takes a value and an optional replacer, and an optional -// space parameter, and returns a JSON text. The replacer can be a function -// that can replace values, or an array of strings that will select the keys. -// A default replacer method can be provided. Use of the space parameter can -// produce text that is more easily readable. +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); - var i; - gap = ''; - indent = ''; + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; -// If the space parameter is a number, make an indent string containing that -// many spaces. + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } - if (typeof space === 'number') { - for (i = 0; i < space; i += 1) { - indent += ' '; - } + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } -// If the space parameter is a string, it will be used as the indent string. + this._types[ext] = type; + } - } else if (typeof space === 'string') { - indent = space; - } + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + } + } +}; -// If there is a replacer, it must be a function or an array. -// Otherwise, throw an error. +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); - rep = replacer; - if (replacer && typeof replacer !== 'function' && - (typeof replacer !== 'object' || - typeof replacer.length !== 'number')) { - throw new Error('JSON.stringify'); - } + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; -// Make a fake root object containing our value under the key of ''. -// Return the result of stringifying the value. + return (hasDot || !hasPath) && this._types[ext] || null; +}; - return str('', {'': value}); - }; - } -}()); +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; /***/ }), -/***/ 6010: +/***/ 29994: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var bufferEqual = __nccwpck_require__(9239); -var Buffer = (__nccwpck_require__(1867).Buffer); -var crypto = __nccwpck_require__(6113); -var formatEcdsa = __nccwpck_require__(1728); -var util = __nccwpck_require__(3837); +"use strict"; -var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' -var MSG_INVALID_SECRET = 'secret must be a string or buffer'; -var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; -var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; -var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; -if (supportsKeyObjects) { - MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; - MSG_INVALID_SECRET += 'or a KeyObject'; -} +let Mime = __nccwpck_require__(46038); +module.exports = new Mime(__nccwpck_require__(13114), __nccwpck_require__(38809)); -function checkIsPublicKey(key) { - if (Buffer.isBuffer(key)) { - return; - } - if (typeof key === 'string') { - return; - } +/***/ }), - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +/***/ 38809: +/***/ ((module) => { - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +module.exports = 
{"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"applicatio
n/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["k
ia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd
.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"applicatio
n/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtboo
k+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":
["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing
":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; - if (typeof key.type !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +/***/ }), - if (typeof key.asymmetricKeyType !== 'string') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } +/***/ 13114: +/***/ ((module) => { - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_VERIFIER_KEY); - } -}; +module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["
meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xl
f"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":[
"mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; -function checkIsPrivateKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +/***/ }), - if (typeof key === 'string') { - return; - } +/***/ 83973: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (typeof key === 'object') { - return; - } +module.exports = minimatch +minimatch.Minimatch = Minimatch - throw typeError(MSG_INVALID_SIGNER_KEY); -}; +var path = (function () { try { return __nccwpck_require__(71017) } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep -function checkIsSecretKey(key) { - if (Buffer.isBuffer(key)) { - return; - } +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __nccwpck_require__(33717) - if (typeof key === 'string') { - return key; - } +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} - if (!supportsKeyObjects) { - throw typeError(MSG_INVALID_SECRET); - } +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' - if (typeof key !== 'object') { - throw typeError(MSG_INVALID_SECRET); - } +// * => any number of characters +var star = qmark + '*?' - if (key.type !== 'secret') { - throw typeError(MSG_INVALID_SECRET); - } +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - if (typeof key.export !== 'function') { - throw typeError(MSG_INVALID_SECRET); - } -} +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' -function fromBase64(base64) { - return base64 - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) } -function toBase64(base64url) { - base64url = base64url.toString(); +// normalizes slashes. 
+var slashSplit = /\/+/ - var padding = 4 - base64url.length % 4; - if (padding !== 4) { - for (var i = 0; i < padding; ++i) { - base64url += '='; - } +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) } - - return base64url - .replace(/\-/g, '+') - .replace(/_/g, '/'); } -function typeError(template) { - var args = [].slice.call(arguments, 1); - var errMsg = util.format.bind(util, template).apply(null, args); - return new TypeError(errMsg); +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t } -function bufferOrString(obj) { - return Buffer.isBuffer(obj) || typeof obj === 'string'; -} +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } -function normalizeInput(thing) { - if (!bufferOrString(thing)) - thing = JSON.stringify(thing); - return thing; -} + var orig = minimatch -function createHmacSigner(bits) { - return function sign(thing, secret) { - checkIsSecretKey(secret); - thing = normalizeInput(thing); - var hmac = crypto.createHmac('sha' + bits, secret); - var sig = (hmac.update(thing), hmac.digest('base64')) - return fromBase64(sig); + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) } -} -function createHmacVerifier(bits) { - return function verify(thing, signature, secret) { - var computedSig = createHmacSigner(bits)(thing, secret); - return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch } -} -function createKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - // Even though we are specifying "RSA" here, this works with ECDSA - // keys as well. 
- var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); - return fromBase64(sig); + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) } -} -function createKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify(publicKey, signature, 'base64'); + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) } -} -function createPSSKeySigner(bits) { - return function sign(thing, privateKey) { - checkIsPrivateKey(privateKey); - thing = normalizeInput(thing); - var signer = crypto.createSign('RSA-SHA' + bits); - var sig = (signer.update(thing), signer.sign({ - key: privateKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, 'base64')); - return fromBase64(sig); + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) } -} -function createPSSKeyVerifier(bits) { - return function verify(thing, signature, publicKey) { - checkIsPublicKey(publicKey); - thing = normalizeInput(thing); - signature = toBase64(signature); - var verifier = crypto.createVerify('RSA-SHA' + bits); - verifier.update(thing); - return verifier.verify({ - key: publicKey, - padding: crypto.constants.RSA_PKCS1_PSS_PADDING, - saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST - }, signature, 'base64'); + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) } -} -function createECDSASigner(bits) { - var inner = createKeySigner(bits); - return function sign() { - var signature = inner.apply(null, arguments); - signature = formatEcdsa.derToJose(signature, 'ES' + bits); - return signature; - }; -} + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } -function createECDSAVerifer(bits) { - var inner = createKeyVerifier(bits); - return function verify(thing, signature, publicKey) { - signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); - var result = inner(thing, signature, publicKey); - return result; - }; + return m } -function createNoneSigner() { - return function sign() { - return ''; - } +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch } -function createNoneVerifier() { - return function verify(thing, signature) { - return signature === ''; - } -} +function minimatch (p, pattern, options) { + assertValidPattern(pattern) -module.exports = function jwa(algorithm) { - var signerFactories = { - hs: createHmacSigner, - rs: createKeySigner, - ps: createPSSKeySigner, - es: createECDSASigner, - none: createNoneSigner, - } - var verifierFactories = { - hs: createHmacVerifier, - rs: createKeyVerifier, - ps: createPSSKeyVerifier, - es: createECDSAVerifer, - none: createNoneVerifier, - } - var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); - if (!match) - throw typeError(MSG_INVALID_ALGORITHM, algorithm); - var algo = (match[1] || match[3]).toLowerCase(); - var bits = match[2]; + if (!options) options = {} - return { - sign: signerFactories[algo](bits), - verify: verifierFactories[algo](bits), + // shortcut: comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false } -}; - - -/***/ }), -/***/ 4636: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return new Minimatch(pattern, options).match(p) +} -/*global exports*/ -var SignStream = __nccwpck_require__(3334); -var VerifyStream = __nccwpck_require__(5522); +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } -var ALGORITHMS = [ - 'HS256', 'HS384', 'HS512', - 'RS256', 'RS384', 'RS512', - 'PS256', 'PS384', 'PS512', - 'ES256', 'ES384', 'ES512' -]; + assertValidPattern(pattern) -exports.ALGORITHMS = ALGORITHMS; -exports.sign = SignStream.sign; -exports.verify = VerifyStream.verify; -exports.decode = VerifyStream.decode; -exports.isValid = VerifyStream.isValid; -exports.createSign = function createSign(opts) { - return new SignStream(opts); -}; -exports.createVerify = function createVerify(opts) { - return new VerifyStream(opts); -}; + if (!options) options = {} + pattern = pattern.trim() -/***/ }), + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } -/***/ 1868: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial -/*global module, process*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var Stream = __nccwpck_require__(2781); -var util = __nccwpck_require__(3837); + // make the set of regexps etc. + this.make() +} -function DataStream(data) { - this.buffer = null; - this.writable = true; - this.readable = true; +Minimatch.prototype.debug = function () {} - // No input - if (!data) { - this.buffer = Buffer.alloc(0); - return this; - } +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options - // Stream - if (typeof data.pipe === 'function') { - this.buffer = Buffer.alloc(0); - data.pipe(this); - return this; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return } - - // Buffer or String - // or Object (assumedly a passworded key) - if (data.length || typeof data === 'object') { - this.buffer = data; - this.writable = false; - process.nextTick(function () { - this.emit('end', data); - this.readable = false; - this.emit('close'); - }.bind(this)); - return this; + if (!pattern) { + this.empty = true + return } - throw new TypeError('Unexpected data type ('+ typeof data + ')'); -} -util.inherits(DataStream, Stream); + // step 1: figure out negation, etc. + this.parseNegate() -DataStream.prototype.write = function write(data) { - this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); - this.emit('data', data); -}; + // step 2: expand braces + var set = this.globSet = this.braceExpand() -DataStream.prototype.end = function end(data) { - if (data) - this.write(data); - this.emit('end', data); - this.emit('close'); - this.writable = false; - this.readable = false; -}; + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } -module.exports = DataStream; + this.debug(this.pattern, set) + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) -/***/ }), + this.debug(this.pattern, set) -/***/ 3334: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = __nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); + this.debug(this.pattern, set) -function base64url(string, encoding) { - return Buffer - .from(string, encoding) - .toString('base64') - .replace(/=/g, '') - .replace(/\+/g, '-') - .replace(/\//g, '_'); -} + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) -function jwsSecuredInput(header, payload, encoding) { - encoding = encoding || 'utf8'; - var encodedHeader = base64url(toString(header), 'binary'); - var encodedPayload = base64url(toString(payload), encoding); - return util.format('%s.%s', encodedHeader, encodedPayload); -} + this.debug(this.pattern, set) -function jwsSign(opts) { - var header = opts.header; - var payload = opts.payload; - var secretOrKey = opts.secret || opts.privateKey; - var encoding = opts.encoding; - var algo = jwa(header.alg); - var securedInput = jwsSecuredInput(header, payload, encoding); - var signature = algo.sign(securedInput, secretOrKey); - return util.format('%s.%s', securedInput, signature); + this.set = set } -function SignStream(opts) { - var secret = opts.secret||opts.privateKey||opts.key; - var secretStream = new DataStream(secret); - this.readable = true; - this.header = opts.header; - this.encoding = opts.encoding; - this.secret = this.privateKey = this.key = secretStream; - this.payload = new DataStream(opts.payload); - this.secret.once('close', function () { - if (!this.payload.writable && this.readable) - this.sign(); - }.bind(this)); +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 - this.payload.once('close', function () { - if (!this.secret.writable && this.readable) - this.sign(); - }.bind(this)); -} -util.inherits(SignStream, Stream); + if (options.nonegate) return -SignStream.prototype.sign = function sign() { - try { - var signature = jwsSign({ - header: this.header, - payload: this.payload.buffer, - secret: this.secret.buffer, - encoding: this.encoding - }); - this.emit('done', signature); - this.emit('data', signature); - this.emit('end'); - this.readable = false; - return signature; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' 
+ ; i++) { + negate = !negate + negateOffset++ } -}; - -SignStream.sign = jwsSign; - -module.exports = SignStream; - - -/***/ }), - -/***/ 5292: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/*global module*/ -var Buffer = (__nccwpck_require__(4300).Buffer); -module.exports = function toString(obj) { - if (typeof obj === 'string') - return obj; - if (typeof obj === 'number' || Buffer.isBuffer(obj)) - return obj.toString(); - return JSON.stringify(obj); -}; + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} -/***/ }), +Minimatch.prototype.braceExpand = braceExpand -/***/ 5522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } -/*global module*/ -var Buffer = (__nccwpck_require__(1867).Buffer); -var DataStream = __nccwpck_require__(1868); -var jwa = __nccwpck_require__(6010); -var Stream = __nccwpck_require__(2781); -var toString = __nccwpck_require__(5292); -var util = __nccwpck_require__(3837); -var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern -function isObject(thing) { - return Object.prototype.toString.call(thing) === '[object Object]'; -} + assertValidPattern(pattern) -function safeJsonParse(thing) { - if (isObject(thing)) - return thing; - try { return JSON.parse(thing); } - catch (e) { return undefined; } -} + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } -function headerFromJWS(jwsSig) { - var encodedHeader = jwsSig.split('.', 1)[0]; - return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); + return expand(pattern) } -function securedInputFromJWS(jwsSig) { - return jwsSig.split('.', 2).join('.'); -} +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } -function signatureFromJWS(jwsSig) { - return jwsSig.split('.')[2]; + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } } -function payloadFromJWS(jwsSig, encoding) { - encoding = encoding || 'utf8'; - var payload = jwsSig.split('.')[1]; - return Buffer.from(payload, 'base64').toString(encoding); -} +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. 
Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) -function isValidJws(string) { - return JWS_REGEX.test(string) && !!headerFromJWS(string); -} + var options = this.options -function jwsVerify(jwsSig, algorithm, secretOrKey) { - if (!algorithm) { - var err = new Error("Missing algorithm parameter for jws.verify"); - err.code = "MISSING_ALGORITHM"; - throw err; + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' } - jwsSig = toString(jwsSig); - var signature = signatureFromJWS(jwsSig); - var securedInput = securedInputFromJWS(jwsSig); - var algo = jwa(algorithm); - return algo.verify(securedInput, signature, secretOrKey); -} + if (pattern === '') return '' -function jwsDecode(jwsSig, opts) { - opts = opts || {}; - jwsSig = toString(jwsSig); + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this - if (!isValidJws(jwsSig)) - return null; + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } - var header = headerFromJWS(jwsSig); + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) - if (!header) - return null; + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } - var payload = payloadFromJWS(jwsSig); - if (header.typ === 'JWT' || opts.json) - payload = JSON.parse(payload, opts.encoding); + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } - return { - header: header, - payload: payload, - signature: signatureFromJWS(jwsSig) - }; -} + case '\\': + clearStateChar() + escaping = true + continue -function VerifyStream(opts) { - opts = opts || {}; - var secretOrKey = opts.secret||opts.publicKey||opts.key; - var secretStream = new DataStream(secretOrKey); - this.readable = true; - this.algorithm = opts.algorithm; - this.encoding = opts.encoding; - this.secret = this.publicKey = this.key = secretStream; - this.signature = new DataStream(opts.signature); - this.secret.once('close', function () { - if (!this.signature.writable && this.readable) - this.verify(); - }.bind(this)); + // the various stateChar values + // for the "extglob" stuff. 
+ case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - this.signature.once('close', function () { - if (!this.secret.writable && this.readable) - this.verify(); - }.bind(this)); -} -util.inherits(VerifyStream, Stream); -VerifyStream.prototype.verify = function verify() { - try { - var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); - var obj = jwsDecode(this.signature.buffer, this.encoding); - this.emit('done', valid, obj); - this.emit('data', valid); - this.emit('end'); - this.readable = false; - return valid; - } catch (e) { - this.readable = false; - this.emit('error', e); - this.emit('close'); - } -}; + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } -VerifyStream.decode = jwsDecode; -VerifyStream.isValid = isValidJws; -VerifyStream.verify = jwsVerify; + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue -module.exports = VerifyStream; + case '(': + if (inClass) { + re += '(' + continue + } + if (!stateChar) { + re += '\\(' + continue + } -/***/ }), + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue -/***/ 7426: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015-2022 Douglas Christopher Wilson - * MIT Licensed - */ + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue -/** - * Module exports. - */ + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } -module.exports = __nccwpck_require__(3765) + clearStateChar() + re += '|' + continue + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() -/***/ }), + if (inClass) { + re += '\\' + c + continue + } -/***/ 3583: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + inClass = true + classStart = i + reClassStart = re.length + re += c + continue -"use strict"; -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + // handle the case where we left a class open. 
+ // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + // finish up the class. + hasMagic = true + inClass = false + re += c + continue -/** - * Module dependencies. - * @private - */ + default: + // swallow any state char that wasn't consumed + clearStateChar() -var db = __nccwpck_require__(7426) -var extname = (__nccwpck_require__(1017).extname) + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } -/** - * Module variables. - * @private - */ + re += c -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i + } // switch + } // for -/** - * Module exports. - * @public - */ + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) -/** - * Get the default charset for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? 
qmark + : '\\' + pl.type -function charset (type) { - if (!type || typeof type !== 'string') { - return false + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail } - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] - - if (mime && mime.charset) { - return mime.charset + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' } - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true } - return false -} + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] -/** - * Create a full Content-Type header given a MIME type or extension. - * - * @param {string} str - * @return {boolean|string} - */ + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } + nlLast += nlAfter - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter - if (!mime) { - return false + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe } - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re } - return mime -} - -/** - * Get the default extension for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ - -function extension (type) { - if (!type || typeof type !== 'string') { - return false + if (addPatternStart) { + re = patternStart + re } - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] - - if (!exts || !exts.length) { - return false + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] } - return exts[0] -} - -/** - * Lookup the MIME type for a file path/extension. - * - * @param {string} path - * @return {boolean|string} - */ + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. 
+ if (!hasMagic) { + return globUnescape(pattern) + } -function lookup (path) { - if (!path || typeof path !== 'string') { - return false + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') } - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' + path) - .toLowerCase() - .substr(1) + regExp._glob = pattern + regExp._src = re - if (!extension) { - return false - } + return regExp +} - return exports.types[extension] || false +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() } -/** - * Populate the extensions and types maps. - * @private - */ +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options - if (!exts || !exts.length) { - return - } + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' - // mime -> extensions - extensions[type] = exts + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue - } - } + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} - // set the extension -> mime - types[extension] = type - } +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list } +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. 
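(Editorial aside, not part of the bundled file: the public helpers attached to the minimatch export above — braceExpand, makeRe and match — can be exercised directly. A minimal sketch, assuming the standalone minimatch package rather than this webpack-bundled copy:)

const minimatch = require('minimatch')

// brace expansion, as described in the comment block above: a{b,c}d -> abd acd
console.log(minimatch.braceExpand('a{b,c}d'))        // [ 'abd', 'acd' ]

// compile a glob to a RegExp once and reuse it
const re = minimatch.makeRe('*.js')
console.log(re.test('index.js'), re.test('index.ts')) // true false

// filter a list of paths against a pattern
console.log(minimatch.match(['a.js', 'b.ts', 'c.js'], '*.js')) // [ 'a.js', 'c.js' ]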
+ if (this.comment) return false + if (this.empty) return f === '' -/***/ }), - -/***/ 6038: -/***/ ((module) => { - -"use strict"; - + if (f === '/' && partial) return true -/** - * @param typeMap [Object] Map of MIME type -> Array[extensions] - * @param ... - */ -function Mime() { - this._types = Object.create(null); - this._extensions = Object.create(null); + var options = this.options - for (let i = 0; i < arguments.length; i++) { - this.define(arguments[i]); + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') } - this.define = this.define.bind(this); - this.getType = this.getType.bind(this); - this.getExtension = this.getExtension.bind(this); -} - -/** - * Define mimetype -> extension mappings. Each key is a mime-type that maps - * to an array of extensions associated with the type. The first extension is - * used as the default extension for the type. - * - * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); - * - * If a type declares an extension that has already been defined, an error will - * be thrown. To suppress this error and force the extension to be associated - * with the new type, pass `force`=true. Alternatively, you may prefix the - * extension with "*" to map the type to extension, without mapping the - * extension to the type. - * - * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); - * - * - * @param map (Object) type definitions - * @param force (Boolean) if true, force overriding of existing definitions - */ -Mime.prototype.define = function(typeMap, force) { - for (let type in typeMap) { - let extensions = typeMap[type].map(function(t) { - return t.toLowerCase(); - }); - type = type.toLowerCase(); + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) - for (let i = 0; i < extensions.length; i++) { - const ext = extensions[i]; + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. - // '*' prefix = not the preferred type for this extension. So fixup the - // extension, and skip it. - if (ext[0] === '*') { - continue; - } + var set = this.set + this.debug(this.pattern, 'set', set) - if (!force && (ext in this._types)) { - throw new Error( - 'Attempt to change mapping for "' + ext + - '" extension from "' + this._types[ext] + '" to "' + type + - '". Pass `force=true` to allow this, otherwise remove "' + ext + - '" from the list of extensions for "' + type + '".' - ); - } + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } - this._types[ext] = type; + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] } - - // Use first extension as default - if (force || !this._extensions[type]) { - const ext = extensions[0]; - this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate } } -}; - -/** - * Lookup a mime type based on extension - */ -Mime.prototype.getType = function(path) { - path = String(path); - let last = path.replace(/^.*[/\\]/, '').toLowerCase(); - let ext = last.replace(/^.*\./, '').toLowerCase(); - - let hasPath = last.length < path.length; - let hasDot = ext.length < last.length - 1; - return (hasDot || !hasPath) && this._types[ext] || null; -}; - -/** - * Return file extension associated with a mime type - */ -Mime.prototype.getExtension = function(type) { - type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; - return type && this._extensions[type.toLowerCase()] || null; -}; + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} -module.exports = Mime; +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) -/***/ }), + this.debug('matchOne', file.length, pattern.length) -/***/ 9994: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] -"use strict"; + this.debug(pattern, p, f) + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false -let Mime = __nccwpck_require__(6038); -module.exports = new Mime(__nccwpck_require__(3114), __nccwpck_require__(8809)); + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } -/***/ }), + // ok, let's see if we can swallow whatever we can. 
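(Editorial aside, not part of the bundled file: the a/**/b/**/c walk traced in the globstar comment above is observable through the public entry point. A minimal sketch, again assuming the standalone minimatch package:)

const minimatch = require('minimatch')

// ** swallows the x/y/z segments, exactly as in the trace above
console.log(minimatch('a/b/x/y/z/c', 'a/**/b/**/c'))           // true

// ** never swallows dotfile segments unless options.dot is set
console.log(minimatch('a/.hidden/c', 'a/**/c'))                // false
console.log(minimatch('a/.hidden/c', 'a/**/c', { dot: true })) // true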
+ while (fr < fl) { + var swallowee = file[fr] -/***/ 8809: -/***/ ((module) => { + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) -module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":[
"mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter"
:["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"]
,"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"applicati
on/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"
],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-so
urce":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":[
"java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } -/***/ }), + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } -/***/ 3114: -/***/ ((module) => { + // no match was found. + // However, in partial mode, we can't say this is necessarily over. 
+ // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } -module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"appli
cation/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/
heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } -/***/ }), + if (!hit) return false + } -/***/ 3973: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. 
+ // However, a/b/ should still satisfy a/* -module.exports = minimatch -minimatch.Minimatch = Minimatch + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } -var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || { - sep: '/' + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') } -minimatch.sep = path.sep - -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __nccwpck_require__(3717) -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') } -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} -// * => any number of characters -var star = qmark + '*?' -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' +/***/ }), -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' +/***/ 80900: +/***/ ((module) => { -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') +/** + * Helpers. + */ -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) -} +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; -// normalizes slashes. -var slashSplit = /\/+/ +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); } -} + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; -function ext (a, b) { - b = b || {} - var t = {} - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - return t -} +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ -minimatch.defaults = function (def) { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return minimatch +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; } +} - var orig = minimatch +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ - var m = function minimatch (p, pattern, options) { - return orig(p, pattern, ext(def, options)) +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; } - - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; } - m.Minimatch.defaults = function defaults (options) { - return orig.defaults(ext(def, options)).Minimatch + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; } - - m.filter = function filter (pattern, options) { - return orig.filter(pattern, ext(def, options)) + if (msAbs >= s) { + return Math.round(ms / s) + 's'; } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ - m.defaults = function defaults (options) { - return orig.defaults(ext(def, options)) +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); } - - m.makeRe = function makeRe (pattern, options) { - return orig.makeRe(pattern, ext(def, options)) + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); } - - m.braceExpand = function braceExpand (pattern, options) { - return orig.braceExpand(pattern, ext(def, options)) + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); } - - m.match = function (list, pattern, options) { - return orig.match(list, pattern, ext(def, options)) + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); } - - return m -} - -Minimatch.defaults = function (def) { - return minimatch.defaults(def).Minimatch + return ms + ' ms'; } -function minimatch (p, pattern, options) { - assertValidPattern(pattern) - - if (!options) options = {} - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } +/** + * Pluralization helper. + */ - return new Minimatch(pattern, options).match(p) +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 
's' : ''); } -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } - assertValidPattern(pattern) +/***/ }), - if (!options) options = {} +/***/ 80467: +/***/ ((module, exports, __nccwpck_require__) => { - pattern = pattern.trim() +"use strict"; - // windows support: need to use /, not \ - if (!options.allowWindowsEscape && path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial +Object.defineProperty(exports, "__esModule", ({ value: true })); - // make the set of regexps etc. - this.make() -} +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } -Minimatch.prototype.debug = function () {} +var Stream = _interopDefault(__nccwpck_require__(12781)); +var http = _interopDefault(__nccwpck_require__(13685)); +var Url = _interopDefault(__nccwpck_require__(57310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(28665)); +var https = _interopDefault(__nccwpck_require__(95687)); +var zlib = _interopDefault(__nccwpck_require__(59796)); -Minimatch.prototype.make = make -function make () { - var pattern = this.pattern - var options = this.options +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; - // step 1: figure out negation, etc. - this.parseNegate() +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); - // step 2: expand braces - var set = this.globSet = this.braceExpand() +class Blob { + constructor() { + this[TYPE] = ''; - if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + const blobParts = arguments[0]; + const options = arguments[1]; - this.debug(this.pattern, set) + const buffers = []; + let size = 0; - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } - this.debug(this.pattern, set) + this[BUFFER] = Buffer.concat(buffers); - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; - this.debug(this.pattern, set) + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} - this.debug(this.pattern, set) +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); - this.set = set -} +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ - if (options.nonegate) return +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; } - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); } -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. 
-// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) -} +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; -Minimatch.prototype.braceExpand = braceExpand +let convert; +try { + convert = (__nccwpck_require__(22877).convert); +} catch (e) {} -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} - } - } +const INTERNALS = Symbol('Body internals'); - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; - assertValidPattern(pattern) +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. - return [pattern] - } + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; - return expand(pattern) -} + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; -var MAX_PATTERN_LENGTH = 1024 * 64 -var assertValidPattern = function (pattern) { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern') - } + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long') - } + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } } -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. 
-Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - assertValidPattern(pattern) +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, - var options = this.options + get bodyUsed() { + return this[INTERNALS].disturbed; + }, - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, - switch (c) { - /* istanbul ignore next */ - case '/': { - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - } + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, - case '\\': - clearStateChar() - escaping = true - continue + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; - // the various stateChar values - // for the "extglob" stuff. 
- case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue +/** + * Consume and convert an entire Body to a Buffer. + * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; - case '(': - if (inClass) { - re += '(' - continue - } + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } - if (!stateChar) { - re += '\\(' - continue - } + this[INTERNALS].disturbed = true; - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } + let body = this.body; - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } + // body is blob + if (isBlob(body)) { + body = body.stream(); + } - clearStateChar() - re += '|' - continue + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } - if (inClass) { - re += '\\' + c - continue - } + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; - inClass = true - classStart = i - reClassStart = re.length - re += c - continue + return new Body.Promise(function (resolve, reject) { + let resTimeout; - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); - // finish up the class. 
- hasMagic = true - inClass = false - re += c - continue + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } - default: - // swallow any state char that wasn't consumed - clearStateChar() + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } + accumBytes += chunk.length; + accum.push(chunk); + }); - re += c + body.on('end', function () { + if (abort) { + return; + } - } // switch - } // for + clearTimeout(resTimeout); - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); - // handle trailing things that only matter at the very end. 
- clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } + // html5 + if (!res && str) { + res = / -1; n--) { - var nl = negativeLists[n] + if (res) { + res = /charset=(.*)/i.exec(res.pop()); + } + } - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) + // xml + if (!res && str) { + res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); + } - nlLast += nlAfter + // found charset + if (res) { + charset = res.pop(); - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter + // prevent decode issues when sites use incorrect encoding + // ref: https://hsivonen.fi/encoding-menu/ + if (charset === 'gb2312' || charset === 'gbk') { + charset = 'gb18030'; + } + } - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' - } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } + // turn raw buffers into a single utf-8 buffer + return convert(buffer, 'UTF-8', charset).toString(); +} - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } +/** + * Detect a URLSearchParams object + * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 + * + * @param Object obj Object to detect by type or brand + * @return String + */ +function isURLSearchParams(obj) { + // Duck-typing as a necessary condition. + if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { + return false; + } - if (addPatternStart) { - re = patternStart + re - } + // Brand-checking and more duck-typing as optional condition. + return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; +} - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } +/** + * Check if `obj` is a W3C `Blob` object (which `File` inherits from) + * @param {*} obj + * @return {boolean} + */ +function isBlob(obj) { + return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); +} - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } +/** + * Clone body given Res/Req instance + * + * @param Mixed instance Response or Request instance + * @return Mixed + */ +function clone(instance) { + let p1, p2; + let body = instance.body; - var flags = options.nocase ? 
'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } + // don't allow cloning a used body + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used'); + } - regExp._glob = pattern - regExp._src = re + // check that body is a stream and not form-data object + // note: we can't clone the form-data object without having it as a dependency + if (body instanceof Stream && typeof body.getBoundary !== 'function') { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); + body.pipe(p1); + body.pipe(p2); + // set instance body to teed body and return the other teed body + instance[INTERNALS].body = p1; + body = p2; + } - return regExp + return body; } -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() +/** + * Performs the operation "extract a `Content-Type` value from |object|" as + * specified in the specification: + * https://fetch.spec.whatwg.org/#concept-bodyinit-extract + * + * This function assumes that instance.body is present. + * + * @param Mixed instance Any options.body input + */ +function extractContentType(body) { + if (body === null) { + // body is null + return null; + } else if (typeof body === 'string') { + // body is string + return 'text/plain;charset=UTF-8'; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + return 'application/x-www-form-urlencoded;charset=UTF-8'; + } else if (isBlob(body)) { + // body is blob + return body.type || null; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return null; + } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + return null; + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + return null; + } else if (typeof body.getBoundary === 'function') { + // detect form data input from form-data module + return `multipart/form-data;boundary=${body.getBoundary()}`; + } else if (body instanceof Stream) { + // body is stream + // can't really do much about this + return null; + } else { + // Body constructor defaults other things to string + return 'text/plain;charset=UTF-8'; + } } -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp +/** + * The Fetch Standard treats this as if "total bytes" is a property on the body. + * For us, we have to explicitly get it with a function. + * + * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * + * @param Body instance Instance of Body + * @return Number? 
Number of bytes, or null if not possible + */ +function getTotalBytes(instance) { + const body = instance.body; + + + if (body === null) { + // body is null + return 0; + } else if (isBlob(body)) { + return body.size; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return body.length; + } else if (body && typeof body.getLengthSync === 'function') { + // detect form data input from form-data module + if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) { + // 2.x + return body.getLengthSync(); + } + return null; + } else { + // body is stream + return null; + } +} - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set +/** + * Write a Body to a Node.js WritableStream (e.g. http.Request) object. + * + * @param Body instance Instance of Body + * @return Void + */ +function writeToStream(dest, instance) { + const body = instance.body; - if (!set.length) { - this.regexp = false - return this.regexp - } - var options = this.options - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' + if (body === null) { + // body is null + dest.end(); + } else if (isBlob(body)) { + body.stream().pipe(dest); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream + body.pipe(dest); + } +} - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') +// expose Promise +Body.Promise = global.Promise; - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' +/** + * headers.js + * + * Headers class offers convenient helpers + */ - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' +const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false - } - return this.regexp +function validateName(name) { + name = `${name}`; + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`); + } } -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list +function validateValue(value) { + value = `${value}`; + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } } -Minimatch.prototype.match = function match (f, partial) { - if (typeof partial === 'undefined') partial = this.partial - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. 
- if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - var options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } +/** + * Find the key in the map object given a header name. + * + * Returns undefined if not found. + * + * @param String name Header name + * @return String|Undefined + */ +function find(map, name) { + name = name.toLowerCase(); + for (const key in map) { + if (key.toLowerCase() === name) { + return key; + } + } + return undefined; +} - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) +const MAP = Symbol('map'); +class Headers { + /** + * Headers class + * + * @param Object headers Response headers + * @return Void + */ + constructor() { + let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. + this[MAP] = Object.create(null); - var set = this.set - this.debug(this.pattern, 'set', set) + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } + return; + } - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate -} + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. 
-Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } - this.debug('matchOne', file.length, pattern.length) + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] + return this[MAP][key].join(', '); + } - this.debug(pattern, p, f) + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - // should be impossible. - // some invalid regexp stuff in the set. - /* istanbul ignore if */ - if (p === false) return false + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' 
|| - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? key : name] = [value]; + } - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } - if (!hit) return false - } + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. 
- // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') - } +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') -} +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); } -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; } +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; -/***/ }), + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } -/***/ 900: -/***/ ((module) => { + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); /** - * Helpers. + * Export the Headers object in a form that Node.js can consume. 
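The HeadersIterator above follows the usual spec-style wiring: a plain object with a next() method whose prototype chains to %IteratorPrototype%, so for..of and spread work without defining Symbol.iterator by hand. A small illustrative sketch of that wiring (makeIterator is a made-up helper, not the vendored code):

```js
// %IteratorPrototype% is reachable from any built-in array iterator.
const IteratorPrototype = Object.getPrototypeOf(
  Object.getPrototypeOf([][Symbol.iterator]())
);

function makeIterator(items) {
  let index = 0;
  const iter = Object.create(IteratorPrototype);
  iter.next = () =>
    index < items.length
      ? { value: items[index++], done: false }
      : { value: undefined, done: true };
  return iter;
}

const it = makeIterator([['accept', '*/*'], ['host', 'example.com']]);
console.log([...it]); // [ [ 'accept', '*/*' ], [ 'host', 'example.com' ] ]
```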
+ * + * @param Headers headers + * @return Object */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} /** - * Parse or format the given `val`. - * - * Options: - * - * - `long` verbose formatting [false] + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public + * @param Object obj Object of headers + * @return Headers */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. val=' + - JSON.stringify(val) - ); -}; +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; /** - * Parse the given `str` and return milliseconds. + * Response class * - * @param {String} str - * @return {Number} - * @api private + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
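createHeadersLenient above skips header names and values that fail the token/char regexes instead of throwing, which matters when importing raw response headers from arbitrary servers. A rough standalone sketch of the same filtering (the regexes mirror the ones in the bundle; lenientHeaders is a made-up name):

```js
// Drop invalid names/values rather than rejecting the whole response.
const invalidToken = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const invalidChar = /[^\t\x20-\x7e\x80-\xff]/;

function lenientHeaders(raw) {
  const out = {};
  for (const [name, value] of Object.entries(raw)) {
    if (invalidToken.test(name)) continue;            // skip bad names
    const values = Array.isArray(value) ? value : [value];
    const kept = values.filter(v => !invalidChar.test(v));
    if (kept.length) out[name] = kept;
  }
  return out;
}

console.log(lenientHeaders({ 'set-cookie': ['a=1', 'b=2'], 'bad name': 'x' }));
// { 'set-cookie': [ 'a=1', 'b=2' ] }
```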
arguments[1] : {}; -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } } +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + /** - * Short format for `ms`. 
+ * Wrapper around `new URL` to handle arbitrary URLs * - * @param {Number} ms - * @return {String} - * @api private + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; - } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; - } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; - } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; - } - return ms + 'ms'; + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); } +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + /** - * Long format for `ms`. + * Check if a value is an instance of Request. * - * @param {Number} ms - * @return {String} - * @api private + * @param Mixed input + * @return Boolean */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); - } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); - } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); - } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); - } - return ms + ' ms'; +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); } /** - * Pluralization helper. + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); -} + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); -/***/ }), + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } -/***/ 467: -/***/ ((module, exports, __nccwpck_require__) => { + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
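parseURL above only round-trips the input through WHATWG `new URL` when it looks absolute, using the RFC 3986 scheme shape, and otherwise falls back to the legacy url.parse. A quick sketch of just that scheme test:

```js
// A URL is treated as absolute when it starts with
// "letter (letter | digit | '+' | '-' | '.')* ':'".
const ABSOLUTE = /^[a-zA-Z][a-zA-Z\d+\-.]*:/;

for (const s of ['https://example.com/x', 'ftp://host', '/relative/path', 'example.com']) {
  console.log(s, ABSOLUTE.test(s));
}
// https://example.com/x true, ftp://host true,
// /relative/path false, example.com false
```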
clone(input) : null; -"use strict"; + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + const headers = new Headers(init.headers || input.headers || {}); -Object.defineProperty(exports, "__esModule", ({ value: true })); + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; -var Stream = _interopDefault(__nccwpck_require__(2781)); -var http = _interopDefault(__nccwpck_require__(3685)); -var Url = _interopDefault(__nccwpck_require__(7310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); -var https = _interopDefault(__nccwpck_require__(5687)); -var zlib = _interopDefault(__nccwpck_require__(9796)); + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); + get method() { + return this[INTERNALS$2].method; + } -class Blob { - constructor() { - this[TYPE] = ''; + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } - const blobParts = arguments[0]; - const options = arguments[1]; + get headers() { + return this[INTERNALS$2].headers; + } - const buffers = []; - let size = 0; + get redirect() { + return this[INTERNALS$2].redirect; + } - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. + * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); - this[BUFFER] = Buffer.concat(buffers); + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); } - get size() { - return this[BUFFER].length; + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); } - get type() { - return this[TYPE]; + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); } - text() { - return Promise.resolve(this[BUFFER].toString()); + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); } - toString() { - return '[object Blob]'; + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); } - slice() { - const size = this.size; - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } - const buffer = 
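getNodeRequestOptions above sets Content-Length to '0' for body-less POST/PUT requests and otherwise derives it from the body's total bytes, leaving it unset when the size is unknown so chunked encoding takes over. A simplified sketch of that decision, assuming only string/Buffer bodies (contentLengthFor is a made-up helper):

```js
// Decide what Content-Length, if any, to send for a request body.
function contentLengthFor(method, body) {
  if (body == null) {
    // an empty POST/PUT still advertises a zero-length body
    return /^(POST|PUT)$/i.test(method) ? '0' : null;
  }
  if (typeof body === 'string') return String(Buffer.byteLength(body));
  if (Buffer.isBuffer(body)) return String(body.length);
  return null; // streams: length unknown, let chunked encoding handle it
}

console.log(contentLengthFor('POST', null));      // '0'
console.log(contentLengthFor('POST', '{"a":1}')); // '7'
console.log(contentLengthFor('GET', null));       // null
```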
this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); } -} -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} /** - * fetch-error.js + * abort-error.js * - * FetchError interface for operational errors + * AbortError interface for cancelled requests */ /** - * Create FetchError instance + * Create AbortError instance * * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError + * @return AbortError */ -function FetchError(message, type, systemError) { +function AbortError(message) { Error.call(this, message); + this.type = 'aborted'; this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; -let convert; -try { - convert = (__nccwpck_require__(2877).convert); -} catch (e) {} +const URL$1 = Url.URL || whatwgUrl.URL; -const INTERNALS = Symbol('Body internals'); +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; /** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body + * isSameProtocol reports whether the two provided URLs use the same protocol. * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void + * Both domains must already be in canonical form. + * @param {string|URL} original + * @param {string|URL} destination */ -function Body(body) { - var _this = this; +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; + return orig === dest; +}; - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
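isDomainOrSubdomain above gates whether Authorization/Cookie-style headers survive a redirect: the check passes only for the same host or a dot-delimited subdomain, so `notexample.com` never matches `example.com`. Roughly the same check, runnable on its own:

```js
const { URL } = require('url');

// True when the original host equals the target host or is a
// dot-separated subdomain of it.
function isDomainOrSubdomain(destination, original) {
  const orig = new URL(original).hostname;
  const dest = new URL(destination).hostname;
  return orig === dest ||
    (orig.endsWith(dest) && orig[orig.length - dest.length - 1] === '.');
}

console.log(isDomainOrSubdomain('https://example.com/a', 'https://api.example.com/b')); // true
console.log(isDomainOrSubdomain('https://evil.com/a', 'https://example.com/b'));        // false
```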
0 : _ref$timeout; +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} + Body.Promise = fetch.Promise; -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); - get bodyUsed() { - return this[INTERNALS].disturbed; - }, + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, + let response = null; - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); }); - }, - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
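The abort wiring above follows a common pattern: bail out immediately if the signal is already aborted, otherwise attach an 'abort' listener and remove it again in finalize() so a long-lived signal does not accumulate listeners. A generic sketch of that pattern, not the vendored fetch itself (runWithSignal is a made-up name; assumes the global AbortController available in Node 15+):

```js
// Run an async task that can be cancelled by an AbortSignal.
function runWithSignal(signal, start) {
  return new Promise((resolve, reject) => {
    const onAbort = () => {
      cleanup();
      reject(new Error('The user aborted a request.'));
    };
    const cleanup = () => signal && signal.removeEventListener('abort', onAbort);

    if (signal && signal.aborted) return onAbort(); // already cancelled
    if (signal) signal.addEventListener('abort', onAbort);

    start().then(
      value => { cleanup(); resolve(value); },
      err => { cleanup(); reject(err); }
    );
  });
}

const ac = new AbortController();
runWithSignal(ac.signal, () => new Promise(r => setTimeout(r, 1000, 'done')))
  .catch(err => console.log(err.message)); // The user aborted a request.
ac.abort();
```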
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
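The redirect handling above rewrites the follow-up request in one specific case: a 303, or a 301/302 answering a POST, becomes a body-less GET, while other status/method combinations keep their method and body. A tiny sketch of just that rule (redirectedRequest is a made-up helper):

```js
// Decide how the next request looks after a redirect response.
function redirectedRequest(method, statusCode) {
  const toGet =
    statusCode === 303 ||
    ((statusCode === 301 || statusCode === 302) && method === 'POST');
  return toGet
    ? { method: 'GET', dropBody: true }
    : { method, dropBody: false };
}

console.log(redirectedRequest('POST', 302)); // { method: 'GET', dropBody: true }
console.log(redirectedRequest('POST', 307)); // { method: 'POST', dropBody: false }
```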
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; } - }); - }, - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); }); - }, - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; + request.on('socket', function (s) { + socket = s; + }); - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; + request.on('response', function (response) { + const headers = response.headers; -// In browsers, all properties are enumerable. 
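The deflate branch above sniffs the first response byte because some legacy servers send raw deflate without the zlib wrapper: a zlib header carries compression method 8 in its low nibble, so that bit pattern selects createInflate() over createInflateRaw(). A standalone sketch of that sniff (inflaterFor is a made-up name):

```js
const zlib = require('zlib');

// Pick the right decompressor for the first chunk of a "deflate" body.
function inflaterFor(firstChunk) {
  return (firstChunk[0] & 0x0f) === 0x08
    ? zlib.createInflate()     // standard zlib header present
    : zlib.createInflateRaw(); // raw deflate, no header
}

const wrapped = zlib.deflateSync(Buffer.from('hello'));
const raw = zlib.deflateRawSync(Buffer.from('hello'));
console.log(inflaterFor(wrapped) instanceof zlib.Inflate);  // true
console.log(inflaterFor(raw) instanceof zlib.InflateRaw);   // true
```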
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); } -}; +} /** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * Redirect code matching * - * @return Promise + * @param Number code Status code + * @return Boolean */ -function consumeBody() { - var _this4 = this; +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } +// expose Promise +fetch.Promise = global.Promise; - this[INTERNALS].disturbed = true; +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - let body = this.body; +/***/ }), - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +var wrappy = __nccwpck_require__(62940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name 
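The once() wrapper above memoizes the first call so later invocations return the cached result instead of re-running the function (the strict variant throws instead). A minimal sketch of the same idea, independent of the wrappy plumbing:

```js
// Run fn at most once; repeat calls return the first result.
function once(fn) {
  let called = false;
  let value;
  return function (...args) {
    if (called) return value;
    called = true;
    value = fn.apply(this, args);
    return value;
  };
}

let hits = 0;
const init = once(() => ++hits);
console.log(init(), init(), hits); // 1 1 1
```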
|| 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 57684: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(15185); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); } - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; + const queue = new Queue(); + let activeCount = 0; - return new Body.Promise(function (resolve, reject) { - let resTimeout; + const next = () => { + activeCount--; - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); + if (queue.size > 0) { + queue.dequeue()(); } + }; - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); + const run = async (fn, resolve, ...args) => { + activeCount++; - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } + const result = (async () => fn(...args))(); - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } + resolve(result); - accumBytes += chunk.length; - accum.push(chunk); - }); + try { + await result; + } catch {} - body.on('end', function () { - if (abort) { - return; - } + next(); + }; - clearTimeout(resTimeout); + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. 
+ await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); } - }); + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); }); -} -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; + return generator; +}; - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } +module.exports = pLimit; - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - // html5 - if (!res && str) { - res = / { - if (res) { - res = /charset=(.*)/i.exec(res.pop()); - } - } +"use strict"; - // xml - if (!res && str) { - res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); - } - // found charset - if (res) { - charset = res.pop(); +var parseUrl = (__nccwpck_require__(57310).parse); - // prevent decode issues when sites use incorrect encoding - // ref: https://hsivonen.fi/encoding-menu/ - if (charset === 'gb2312' || charset === 'gbk') { - charset = 'gb18030'; - } - } +var DEFAULT_PORTS = { + ftp: 21, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443, +}; - // turn raw buffers into a single utf-8 buffer - return convert(buffer, 'UTF-8', charset).toString(); -} +var stringEndsWith = String.prototype.endsWith || function(s) { + return s.length <= this.length && + this.indexOf(s, this.length - s.length) !== -1; +}; /** - * Detect a URLSearchParams object - * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 - * - * @param Object obj Object to detect by type or brand - * @return String + * @param {string|object} url - The URL, or the result from url.parse. + * @return {string} The URL of the proxy that should handle the request to the + * given URL. If no proxy is set, this will be an empty string. */ -function isURLSearchParams(obj) { - // Duck-typing as a necessary condition. - if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { - return false; - } +function getProxyForUrl(url) { + var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; + var proto = parsedUrl.protocol; + var hostname = parsedUrl.host; + var port = parsedUrl.port; + if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { + return ''; // Don't proxy URLs without a valid scheme or host. + } - // Brand-checking and more duck-typing as optional condition. 
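pLimit above caps how many queued async functions run concurrently, dequeuing only after an awaited microtask so activeCount is read once it has settled. A compact, self-contained sketch of the same queue-plus-counter idea (simplified; limitConcurrency is a made-up name and omits activeCount/pendingCount introspection):

```js
// Cap how many async tasks run at once; queue the rest.
function limitConcurrency(max) {
  const queue = [];
  let active = 0;

  const next = () => {
    active--;
    if (queue.length > 0) queue.shift()();
  };

  return (fn, ...args) =>
    new Promise((resolve, reject) => {
      const run = () => {
        active++;
        Promise.resolve()
          .then(() => fn(...args))
          .then(resolve, reject)
          .finally(next);
      };
      if (active < max) run();
      else queue.push(run);
    });
}

// Usage: at most two jobs in flight at a time.
const limit = limitConcurrency(2);
const job = id => new Promise(r => setTimeout(() => r(id), 10));
Promise.all([1, 2, 3, 4].map(id => limit(job, id))).then(console.log); // [ 1, 2, 3, 4 ]
```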
- return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; -} + proto = proto.split(':', 1)[0]; + // Stripping ports in this way instead of using parsedUrl.hostname to make + // sure that the brackets around IPv6 addresses are kept. + hostname = hostname.replace(/:\d*$/, ''); + port = parseInt(port) || DEFAULT_PORTS[proto] || 0; + if (!shouldProxy(hostname, port)) { + return ''; // Don't proxy URLs that match NO_PROXY. + } -/** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} - */ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); + var proxy = + getEnv('npm_config_' + proto + '_proxy') || + getEnv(proto + '_proxy') || + getEnv('npm_config_proxy') || + getEnv('all_proxy'); + if (proxy && proxy.indexOf('://') === -1) { + // Missing scheme in proxy, default to the requested URL's scheme. + proxy = proto + '://' + proxy; + } + return proxy; } /** - * Clone body given Res/Req instance + * Determines whether a given URL should be proxied. * - * @param Mixed instance Response or Request instance - * @return Mixed + * @param {string} hostname - The host name of the URL. + * @param {number} port - The effective port of the URL. + * @returns {boolean} Whether the given URL should be proxied. + * @private */ -function clone(instance) { - let p1, p2; - let body = instance.body; - - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } +function shouldProxy(hostname, port) { + var NO_PROXY = + (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); + if (!NO_PROXY) { + return true; // Always proxy if NO_PROXY is not set. + } + if (NO_PROXY === '*') { + return false; // Never proxy if wildcard is set. + } - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } + return NO_PROXY.split(/[,\s]/).every(function(proxy) { + if (!proxy) { + return true; // Skip zero-length hosts. + } + var parsedProxy = proxy.match(/^(.+):(\d+)$/); + var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; + var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; + if (parsedProxyPort && parsedProxyPort !== port) { + return true; // Skip if ports don't match. + } - return body; -} + if (!/^[.*]/.test(parsedProxyHostname)) { + // No wildcards, so stop proxying if there is an exact match. + return hostname !== parsedProxyHostname; + } -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is present. 
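shouldProxy above decides, per host, whether NO_PROXY disables the proxy: '*' turns proxying off globally, entries with a leading wildcard or dot match as suffixes, and plain entries must match the host exactly (the bundled version also compares ports). A trimmed-down sketch of that matching, hostname-only and without the environment lookup:

```js
// Return true when the host should still go through the proxy.
function shouldProxy(hostname, noProxy) {
  if (!noProxy) return true;        // always proxy when NO_PROXY is unset
  if (noProxy === '*') return false; // never proxy on the global wildcard
  return noProxy.split(/[,\s]/).every(entry => {
    if (!entry) return true;
    if (!/^[.*]/.test(entry)) return hostname !== entry; // exact match blocks proxy
    const suffix = entry.replace(/^\*/, '');
    return !hostname.endsWith(suffix);                   // suffix match blocks proxy
  });
}

console.log(shouldProxy('api.internal.example.com', '*.example.com,localhost')); // false
console.log(shouldProxy('registry.npmjs.org', '*.example.com,localhost'));       // true
```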
- * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } + if (parsedProxyHostname.charAt(0) === '*') { + // Remove leading wildcard. + parsedProxyHostname = parsedProxyHostname.slice(1); + } + // Stop proxying if the hostname ends with the no_proxy host. + return !stringEndsWith.call(hostname, parsedProxyHostname); + }); } /** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. - * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * Get the value for an environment variable. * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible + * @param {string} key - The name of the environment variable. + * @return {string} The value of the environment variable. + * @private */ -function getTotalBytes(instance) { - const body = instance.body; +function getEnv(key) { + return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; +} +exports.getProxyForUrl = getProxyForUrl; - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } -} -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. 
- * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +/***/ }), +/***/ 67214: +/***/ ((module) => { - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } -} +"use strict"; -// expose Promise -Body.Promise = global.Promise; -/** - * headers.js - * - * Headers class offers convenient helpers - */ +const codes = {}; -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; } -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } } -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. - * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; } - return undefined; + return str.substring(this_len - search.length, this_len) === search; } -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } - this[MAP] = Object.create(null); + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } - return; - } + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - // We don't worry about converting prop to ByteString here as append() - // will handle it. 
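createErrorType above stamps each generated Error subclass with a stable code (for example ERR_STREAM_PREMATURE_CLOSE) so downstream code can branch on err.code instead of parsing messages. A small sketch of the same factory pattern (illustrative, not the readable-stream implementation):

```js
// Build an Error subclass that carries a machine-readable code.
function createErrorType(code, message, Base = Error) {
  class CodedError extends Base {
    constructor(...args) {
      super(typeof message === 'function' ? message(...args) : message);
      this.code = code;
      this.name = `${Base.name} [${code}]`;
    }
  }
  return CodedError;
}

const ERR_STREAM_DESTROYED = createErrorType(
  'ERR_STREAM_DESTROYED',
  name => `Cannot call ${name} after a stream was destroyed`
);

const err = new ERR_STREAM_DESTROYED('write');
console.log(err.code, '-', err.message);
// ERR_STREAM_DESTROYED - Cannot call write after a stream was destroyed
```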
- if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } +module.exports.q = codes; - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } +/***/ }), - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } +/***/ 41359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this[MAP][key].join(', '); - } +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
- let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; - } +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } +module.exports = Duplex; +var Readable = __nccwpck_require__(51433); +var Writable = __nccwpck_require__(32094); +__nccwpck_require__(44124)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 81542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +var Transform = __nccwpck_require__(34415); +__nccwpck_require__(44124)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 51433: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } +module.exports = Readable; - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; +/**/ +var Duplex; +/**/ -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); +Readable.ReadableState = ReadableState; -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); +/**/ +var EE = (__nccwpck_require__(82361).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; +/**/ +var Stream = __nccwpck_require__(62387); +/**/ - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); +var Buffer = (__nccwpck_require__(14300).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; +/**/ +var debugUtil = __nccwpck_require__(73837); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; } +/**/ -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } +var BufferList = __nccwpck_require__(52746); +var destroyImpl = __nccwpck_require__(97049); +var _require = __nccwpck_require__(39948), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(67214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +__nccwpck_require__(44124)(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(41359); + options = options || {}; - this[INTERNAL].index = index + 1; + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; - return obj; -} + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; -const INTERNALS$1 = Symbol('Response internals'); + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + // has it been destroyed + this.destroyed = false; -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - Body.call(this, body, opts); + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; - const status = opts.status || 200; - const headers = new Headers(opts.headers); + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(94841)/* .StringDecoder */ .s); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(41359); + if (!(this instanceof Readable)) return new Readable(options); - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } - get url() { - return this[INTERNALS$1].url || ''; - } + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; - get status() { - return this[INTERNALS$1].status; - } +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
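// Illustrative sketch, not part of the bundled code: a custom Readable that
// honours the push() contract described above -- keep pushing until push()
// returns false (the buffer has reached highWaterMark), then wait until _read()
// is called again. Uses Node's public stream API, which this vendored copy mirrors.
const { Readable } = require('stream');

class Counter extends Readable {
  constructor(limit) {
    super({ highWaterMark: 16 });   // tiny buffer so backpressure is visible
    this.current = 0;
    this.limit = limit;
  }
  _read(size) {
    let keepGoing = true;
    while (keepGoing && this.current < this.limit) {
      keepGoing = this.push(`${this.current++}\n`);   // false => stop for now
    }
    if (this.current >= this.limit) this.push(null);  // signal EOF
  }
}

new Counter(100).pipe(process.stdout);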
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } - get redirected() { - return this[INTERNALS$1].counter > 0; - } + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; - get statusText() { - return this[INTERNALS$1].statusText; - } +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(94841)/* .StringDecoder */ .s); + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; - get headers() { - return this[INTERNALS$1].headers; - } + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; } -Body.mixIn(Response.prototype); +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + // if we've ended, and we're now clear, then finish it up. 
+ if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); -/** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 - */ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. 
+ if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } } -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); } -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. 
In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } - let parsedURL; + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
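// Illustrative sketch, not part of the bundled code: the awaitDrain/'drain'
// bookkeeping that pipe() implements below automates this manual pattern --
// pause the source when dest.write() returns false and resume it on 'drain'.
// (pipe() additionally handles 'error', 'unpipe' and multiple destinations.)
function manualPipe(src, dest) {
  src.on('data', chunk => {
    if (!dest.write(chunk)) {                  // destination buffer is full
      src.pause();                             // stop producing...
      dest.once('drain', () => src.resume());  // ...until it has drained
    }
  });
  src.on('end', () => dest.end());
}

// hypothetical usage with file streams:
// const fs = require('fs');
// manualPipe(fs.createReadStream('in.txt'), fs.createWriteStream('out.txt'));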
+ var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); + // tell the dest that it's being piped to + dest.emit('pipe', src); - const headers = new Headers(init.headers || input.headers || {}); + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. 
This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} - get method() { - return this[INTERNALS$2].method; - } +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. 
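// Illustrative sketch, not part of the bundled code: wrap(), defined below,
// adapts an "old-style" stream (one that just emits 'data'/'end' and supports
// pause()/resume()) into a modern Readable. `OldSource` is a hypothetical
// stand-in for such a legacy stream; the stream API used is Node's public one,
// which this vendored copy mirrors.
const { Readable } = require('stream');
const { EventEmitter } = require('events');

class OldSource extends EventEmitter {
  constructor() {
    super();
    // old-style streams start emitting on their own
    setImmediate(() => {
      this.emit('data', 'legacy chunk');
      this.emit('end');
    });
  }
  pause() {}   // wrap() calls these to apply backpressure
  resume() {}
}

const wrapped = new Readable().wrap(new OldSource());
wrapped.on('data', chunk => console.log('got:', chunk.toString()));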
+Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); - get headers() { - return this[INTERNALS$2].headers; - } + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); - get redirect() { - return this[INTERNALS$2].redirect; - } + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } - get signal() { - return this[INTERNALS$2].signal; - } + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(43306); + } + return createReadableStreamAsyncIterator(this); + }; } - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } }); -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } +// exposed for testing purposes only. 
+Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } }); -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(39082); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } +/***/ }), - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } +/***/ 34415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. 
In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } +module.exports = Transform; +var _require$codes = (__nccwpck_require__(67214)/* .codes */ .q), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = __nccwpck_require__(41359); +__nccwpck_require__(44124)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; - this.type = 'aborted'; - this.message = message; +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); } -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = Url.URL || whatwgUrl.URL; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; - -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; +/***/ }), - return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); -}; +/***/ 32094: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * isSameProtocol reports whether the two provided URLs use the same protocol. - * - * Both domains must already be in canonical form. - * @param {string|URL} original - * @param {string|URL} destination - */ -const isSameProtocol = function isSameProtocol(destination, original) { - const orig = new URL$1(original).protocol; - const dest = new URL$1(destination).protocol; +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - return orig === dest; -}; +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - Body.Promise = fetch.Promise; +module.exports = Writable; - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ - let response = null; +/**/ +var Duplex; +/**/ - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - destroyStream(request.body, error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; +Writable.WritableState = WritableState; - if (signal && signal.aborted) { - abort(); - return; - } +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(65278) +}; +/**/ - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; +/**/ +var Stream = __nccwpck_require__(62387); +/**/ - // send request - const req = send(options); - let reqTimeout; +var Buffer = (__nccwpck_require__(14300).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = __nccwpck_require__(97049); +var _require = __nccwpck_require__(39948), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(67214)/* .codes */ .q), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(44124)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(41359); + options = options || {}; - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } + // object stream flag to indicate whether or not this stream + // contains buffers or objects. 
+ this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + // if _final has been called + this.finalCalled = false; - if (response && response.body) { - destroyStream(response.body, err); - } + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; - finalize(); - }); + // has it been destroyed + this.destroyed = false; - fixResponseChunkedTransferBadEnding(req, function (err) { - if (signal && signal.aborted) { - return; - } + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; - if (response && response.body) { - destroyStream(response.body, err); - } - }); + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - /* c8 ignore next 18 */ - if (parseInt(process.version.substring(1)) < 14) { - // Before Node.js 14, pipeline() does not fully support async iterators and does not always - // properly handle when the socket close/end events are out of order. - req.on('socket', function (s) { - s.addListener('close', function (hadError) { - // if a data listener is still present we didn't end cleanly - const hasDataListener = s.listenerCount('data') > 0; + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; - // if end happened before close but the socket didn't emit an error, do it now - if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - response.body.emit('error', err); - } - }); - }); - } + // a flag to see when we're in the middle of a write. + this.writing = false; - req.on('response', function (res) { - clearTimeout(reqTimeout); + // when true all writes will be buffered until .uncork() call + this.corked = 0; - const headers = createHeadersLenient(res.headers); + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. 
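The Writable hunk above states the core contract: implement an async _write(chunk, encoding, cb) and the base class handles buffering and 'drain' emission, with highWaterMark marking the point where write() starts returning false. A minimal sketch of that contract, using Node's public stream.Writable (same interface as this bundled copy); the in-memory chunks array is illustrative, not part of the library.

const { Writable } = require('stream');

// Illustrative sketch: a sink that collects written chunks in memory.
// _write is called once per chunk; calling cb() marks it complete, and
// the base class takes care of queueing and emitting 'drain'.
class CollectStream extends Writable {
  constructor(options) {
    super(options);
    this.chunks = []; // illustrative field
  }
  _write(chunk, encoding, cb) {
    this.chunks.push(chunk); // strings arrive as Buffers (decodeStrings default)
    cb(); // pass an Error here to fail the write
  }
}

const sink = new CollectStream();
sink.write('hello ');
sink.end('world', () => {
  console.log(Buffer.concat(sink.chunks).toString()); // "hello world"
});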
+ this.sync = true; - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; - if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); + // count buffered requests + this.bufferedRequestCount = 0; - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
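The comment above (and the Symbol.hasInstance override that follows it) exists because a Duplex's prototype chain only points to Readable, yet a Duplex must still pass instanceof Writable. A small standalone sketch of the same trick, with illustrative class names that are not part of the bundle:

// Sketch: a class can "claim" objects that are not on its prototype chain
// by defining Symbol.hasInstance to test for a marker property instead.
class Writish {
  constructor() {
    this._writableState = {}; // marker, analogous to the real WritableState
  }
}

class Sink {
  static [Symbol.hasInstance](object) {
    return !!(object && object._writableState);
  }
}

console.log(new Writish() instanceof Sink); // true, despite no inheritance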
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(41359); - // HTTP-network fetch step 12.1.1.4: handle content codings + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - raw.on('end', function () { - // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
- if (!response) { - response = new Response(body, response_options); - resolve(response); - } - }); - return; - } +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} - writeToStream(req, request); - }); +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
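The comments in this hunk describe the consumer-side protocol: when write() returns false the buffer has passed highWaterMark, and 'drain' is deliberately emitted on a later tick so the caller has seen the false return value and can attach a listener. A sketch of a producer that honors that protocol, using Node's public Writable; the writeMany helper is illustrative.

const { Writable } = require('stream');

// Illustrative sketch: write a batch of lines, pausing whenever write()
// returns false and resuming on 'drain'.
function writeMany(writable, count) {
  let i = 0;
  (function writeSome() {
    while (i < count) {
      const ok = writable.write(`line ${i++}\n`);
      if (!ok) {
        // highWaterMark exceeded: wait for the buffer to empty.
        writable.once('drain', writeSome);
        return;
      }
    }
    writable.end();
  })();
}

// A slow sink (cb deferred with setImmediate) so back-pressure actually occurs.
writeMany(new Writable({ write(chunk, enc, cb) { setImmediate(cb); } }), 10000);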
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } } -function fixResponseChunkedTransferBadEnding(request, errorCallback) { - let socket; - request.on('socket', function (s) { - socket = s; - }); +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); - request.on('response', function (response) { - const headers = response.headers; + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { - response.once('close', function (hadError) { - // tests for socket presence, as in some situations the - // the 'socket' event is not triggered for the request - // (happens in deno), avoids `TypeError` - // if a data listener is still present we didn't end cleanly - const hasDataListener = socket && socket.listenerCount('data') > 0; + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } - if (hasDataListener && !hadError) { - const err = new Error('Premature close'); - err.code = 'ERR_STREAM_PREMATURE_CLOSE'; - errorCallback(err); - } - }); - } - }); + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; } +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } -function destroyStream(stream, err) { - if (stream.destroy) { - stream.destroy(err); - } else { - // node < 8 - stream.emit('error', err); - stream.end(); - } + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; } +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); }; -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; -exports.AbortError = AbortError; - - /***/ }), -/***/ 1223: +/***/ 43306: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) +"use strict"; - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = __nccwpck_require__(76080); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } } - f.called = false - return f } +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
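The module in this hunk wires Symbol.asyncIterator onto readable streams, resolving one chunk per next() call and deferring via nextTick so late errors are not lost. A usage sketch of what that machinery enables, written against Node's built-in Readable (Node 12+ provides the same async-iterator support and Readable.from); the collect helper is illustrative.

const { Readable } = require('stream');

// Illustrative sketch: consume a readable stream with for await,
// which is exactly what the async-iterator plumbing above supports.
async function collect() {
  const readable = Readable.from(['a', 'b', 'c']);
  const parts = [];
  for await (const chunk of readable) {
    parts.push(chunk); // next() resolves one chunk at a time, in order
  }
  return parts.join(''); // "abc"
}

collect().then(console.log);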
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', 
onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; /***/ }), -/***/ 7684: +/***/ 52746: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const Queue = __nccwpck_require__(5185); - -const pLimit = concurrency => { - if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { - throw new TypeError('Expected `concurrency` to be a number from 1 and up'); - } - - const queue = new Queue(); - let activeCount = 0; - - const next = () => { - activeCount--; - - if (queue.size > 0) { - queue.dequeue()(); - } - }; - - const run = async (fn, resolve, ...args) => { - activeCount++; - - const result = (async () => fn(...args))(); - - resolve(result); - - try { - await result; - } catch {} - - next(); - }; - - const enqueue = (fn, resolve, ...args) => { - queue.enqueue(run.bind(null, fn, resolve, ...args)); - - (async () => { - // This function needs to wait until the next microtask before comparing - // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously - // when the run function is dequeued and called. The comparison in the if-statement - // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. - await Promise.resolve(); - - if (activeCount < concurrency && queue.size > 0) { - queue.dequeue()(); - } - })(); - }; - const generator = (fn, ...args) => new Promise(resolve => { - enqueue(fn, resolve, ...args); - }); +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var _require = __nccwpck_require__(14300), + Buffer = _require.Buffer; +var _require2 = __nccwpck_require__(73837), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } - Object.defineProperties(generator, { - activeCount: { - get: () => activeCount - }, - pendingCount: { - get: () => queue.size - }, - clearQueue: { - value: () => { - queue.clear(); - } - } - }); + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } - return generator; -}; + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? 
str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } -module.exports = pLimit; + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); /***/ }), -/***/ 3329: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 97049: +/***/ ((module) => { "use strict"; -var parseUrl = (__nccwpck_require__(7310).parse); - -var DEFAULT_PORTS = { - ftp: 21, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443, -}; - -var stringEndsWith = String.prototype.endsWith || function(s) { - return s.length <= this.length && - this.indexOf(s, this.length - s.length) !== -1; -}; - -/** - * @param {string|object} url - The URL, or the result from url.parse. - * @return {string} The URL of the proxy that should handle the request to the - * given URL. If no proxy is set, this will be an empty string. - */ -function getProxyForUrl(url) { - var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; - var proto = parsedUrl.protocol; - var hostname = parsedUrl.host; - var port = parsedUrl.port; - if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { - return ''; // Don't proxy URLs without a valid scheme or host. +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; } - proto = proto.split(':', 1)[0]; - // Stripping ports in this way instead of using parsedUrl.hostname to make - // sure that the brackets around IPv6 addresses are kept. - hostname = hostname.replace(/:\d*$/, ''); - port = parseInt(port) || DEFAULT_PORTS[proto] || 0; - if (!shouldProxy(hostname, port)) { - return ''; // Don't proxy URLs that match NO_PROXY. 
- } + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks - var proxy = - getEnv('npm_config_' + proto + '_proxy') || - getEnv(proto + '_proxy') || - getEnv('npm_config_proxy') || - getEnv('all_proxy'); - if (proxy && proxy.indexOf('://') === -1) { - // Missing scheme in proxy, default to the requested URL's scheme. - proxy = proto + '://' + proxy; + if (this._readableState) { + this._readableState.destroyed = true; } - return proxy; -} -/** - * Determines whether a given URL should be proxied. - * - * @param {string} hostname - The host name of the URL. - * @param {number} port - The effective port of the URL. - * @returns {boolean} Whether the given URL should be proxied. - * @private - */ -function shouldProxy(hostname, port) { - var NO_PROXY = - (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); - if (!NO_PROXY) { - return true; // Always proxy if NO_PROXY is not set. - } - if (NO_PROXY === '*') { - return false; // Never proxy if wildcard is set. + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; } - - return NO_PROXY.split(/[,\s]/).every(function(proxy) { - if (!proxy) { - return true; // Skip zero-length hosts. - } - var parsedProxy = proxy.match(/^(.+):(\d+)$/); - var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; - var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; - if (parsedProxyPort && parsedProxyPort !== port) { - return true; // Skip if ports don't match. - } - - if (!/^[.*]/.test(parsedProxyHostname)) { - // No wildcards, so stop proxying if there is an exact match. - return hostname !== parsedProxyHostname; - } - - if (parsedProxyHostname.charAt(0) === '*') { - // Remove leading wildcard. - parsedProxyHostname = parsedProxyHostname.slice(1); + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); } - // Stop proxying if the hostname ends with the no_proxy host. - return !stringEndsWith.call(hostname, parsedProxyHostname); }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } } - -/** - * Get the value for an environment variable. - * - * @param {string} key - The name of the environment variable. - * @return {string} The value of the environment variable. 
- * @private - */ -function getEnv(key) { - return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; +function emitErrorNT(self, err) { + self.emit('error', err); } +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. -exports.getProxyForUrl = getProxyForUrl; - + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; /***/ }), -/***/ 7214: -/***/ ((module) => { +/***/ 76080: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). -const codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error - } - - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } - } - - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); - } - } - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - - codes[code] = NodeError; -} -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; - } else { - return `of ${thing} ${expected[0]}`; +var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(67214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; } - } else { - return `of ${thing} ${String(expected)}`; - } -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; + callback.apply(this, args); + }; } - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; } - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; } - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not 
be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; - } - - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.q = codes; - +module.exports = eos; /***/ }), -/***/ 1359: +/***/ 39082: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) keys.push(key); - return keys; -}; -/**/ - -module.exports = Duplex; -var Readable = __nccwpck_require__(1433); -var Writable = __nccwpck_require__(2094); -__nccwpck_require__(4124)(Duplex, Readable); -{ - // Allow the keys array to be GC'ed. 
- var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(67214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); } + }; + function next() { + return _next2.apply(this, arguments); } -} -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); } -}); - -// the no-half-open enforcer -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - process.nextTick(onEndNT, this); -} -function onEndNT(self) { - self.end(); + return readable; } -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } +module.exports = from; - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); /***/ }), -/***/ 1542: +/***/ 76989: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = (__nccwpck_require__(67214)/* .codes */ .q), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(76080); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; -module.exports = PassThrough; -var Transform = __nccwpck_require__(4415); -__nccwpck_require__(4124)(PassThrough, Transform); -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; } -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + 
} + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; /***/ }), -/***/ 1433: +/***/ 39948: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(67214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; -module.exports = Readable; +/***/ }), -/**/ -var Duplex; -/**/ +/***/ 62387: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Readable.ReadableState = ReadableState; +module.exports = __nccwpck_require__(12781); -/**/ -var EE = (__nccwpck_require__(2361).EventEmitter); -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ -/**/ -var Stream = __nccwpck_require__(2387); -/**/ +/***/ }), -var Buffer = (__nccwpck_require__(4300).Buffer); -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} +/***/ 51642: +/***/ ((module, exports, __nccwpck_require__) => { -/**/ -var debugUtil = __nccwpck_require__(3837); -var debug; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); +var Stream = __nccwpck_require__(12781); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; } else { - debug = function debug() {}; + exports = module.exports = __nccwpck_require__(51433); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(32094); + exports.Duplex = __nccwpck_require__(41359); + exports.Transform = __nccwpck_require__(34415); + exports.PassThrough = __nccwpck_require__(81542); + exports.finished = __nccwpck_require__(76080); + exports.pipeline = __nccwpck_require__(76989); } -/**/ -var BufferList = __nccwpck_require__(6522); -var destroyImpl = __nccwpck_require__(7049); -var _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -var _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; -// Lazy loaded to improve the startup performance. -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; -__nccwpck_require__(4124)(Readable, Stream); -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); +/***/ }), - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; +/***/ 63515: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; +"use strict"; - // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); +const {PassThrough} = __nccwpck_require__(12781); +const extend = __nccwpck_require__(38171); - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; +const DEFAULTS = { + objectMode: false, + retries: 2, - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, - // Should .destroy() be called after 'end' (and potentially 'finish') - this.autoDestroy = !!options.autoDestroy; + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, - // has it been destroyed - this.destroyed = false; + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- this.defaultEncoding = options.defaultEncoding || 'utf8'; + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. + return true; + } + } + }, +}; - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; } -} -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - if (!(this instanceof Readable)) return new Readable(options); - // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; - // legacy - this.readable = true; - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof opts === 'function') { + callback = opts; } - Stream.call(this); -} -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
-Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } + makeRequest(); } - // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); + if (streamMode) { + return retryStream; } else { - // update the buffer info. - state.length += state.objectMode ? 
1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); - } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + return retryRequest; } - return er; -} -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(4841)/* .StringDecoder */ .s); - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; - // If setEncoding(null), decoder.encoding equals utf8 - this._readableState.encoding = this._readableState.decoder.encoding; + function resetStreams() { + delayStream = null; - // Iterate over current buffer to convert already stored Buffers: - var p = this._readableState.buffer.head; - var content = ''; - while (p !== null) { - content += decoder.write(p.data); - p = p.next; - } - this._readableState.buffer.clear(); - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); -// Don't raise the hwm > 1GB -var MAX_HWM = 0x40000000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } } - return n; -} -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; -} + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? 
state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - n = howMuchToRead(n, state); + if (streamMode) { + streamResponseHandled = false; - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } } - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); + setTimeout(makeRequest, nextRetryDelay); } - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. 
- if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; - } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + function onResponse(err, response, body) { + // An error such as DNS resolution. + if (err) { + numNoResponseAttempts++; - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } - if (ret !== null) this.emit('data', ret); - return ret; -}; -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; } - } - state.ended = true; - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. - state.needReadable = false; - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; } - } -} -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } } } -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } - // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); -} +module.exports = retryRequest; -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } -} -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. 
- break; - } - state.readingMore = false; +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); } -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - function onend() { - debug('onend'); - dest.end(); - } +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. 
- if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - } - src.pause(); - } - } +/***/ }), - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } +/***/ 71604: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); +module.exports = __nccwpck_require__(56244); - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } +/***/ }), - // tell the dest that it's being piped to - dest.emit('pipe', src); +/***/ 56244: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - return dest; +var RetryOperation = __nccwpck_require__(45369); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); }; -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); } - // slow case. multiple pipe destinations. + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } - if (!dest) { - // remove all. 
- var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - return this; + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); } - // try to find the right one. - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; }; -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? (Math.random() + 1) + : 1; - // Try start flowing on next tick if stream isn't explicitly paused - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); } } } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; -Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - return res; -}; -Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. 
- process.nextTick(updateReadableListening, this); + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; } - return res; }; -function updateReadableListening(self) { - var state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; - // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); + +/***/ }), + +/***/ 45369: +/***/ ((module) => { + +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); } } -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); } -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. 
-Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - // we flow only if there is no one listening - // for readable, but we still have to call - // resume() - state.flowing = !state.readableListening; - resume(this, state); +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); } - state.paused = false; - return this; + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; }; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); } -} -function resume_(stream, state) { - debug('resume', state.reading); - if (!state.reading) { - stream.read(0); + + if (!err) { + return false; } - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; } - this._readableState.paused = true; - return this; -}; -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null); -} -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var _this = this; - var state = this._readableState; - var paused = false; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); + this._errors.push(err); - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; } - }); + } - // proxy all the other methods. - // important when wrapping filters and duplexes. 
- for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } } - } - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); } - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - return this; + return true; }; -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = __nccwpck_require__(3306); + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; } - return createReadableStreamAsyncIterator(this); - }; -} -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; + + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; } -}); -// exposed for testing purposes only. 
-Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.length; + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } } -}); -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); + return mainError; +}; + + +/***/ }), + +/***/ 21867: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(14300) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] } - return ret; } -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') } + return Buffer(arg, encodingOrOffset, length) } -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); - // Check that we didn't get one last unshift. 
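Module 45369 above is the `retry` package's RetryOperation. A hedged sketch of how its attempt/retry/mainError cycle is normally driven, assuming the standalone `retry` package; `flakyCall` is a made-up callback-style operation used only for illustration:

const retry = require('retry');

// Hypothetical flaky operation: fails twice, then succeeds.
let calls = 0;
function flakyCall(cb) {
  calls += 1;
  if (calls < 3) return cb(new Error('ECONNRESET'));
  cb(null, 'ok');
}

const op = retry.operation({retries: 3, minTimeout: 50, randomize: false});

op.attempt(function (currentAttempt) {
  flakyCall(function (err, result) {
    // retry(err) returns true while timeouts remain and schedules the next
    // attempt via setTimeout (see RetryOperation.prototype.retry above).
    if (op.retry(err)) return;
    // mainError() is the most frequently recorded error across attempts.
    console.log(currentAttempt, err ? op.mainError().message : result); // 3 'ok'
  });
});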
- if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) } + } else { + buf.fill(0) } + return buf } -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = __nccwpck_require__(9082); - } - return from(Readable, iterable, opts); - }; + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) } -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') } - return -1; + return buffer.SlowBuffer(size) } + /***/ }), -/***/ 4415: +/***/ 79626: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. 
When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +var stubs = __nccwpck_require__(1099) -module.exports = Transform; -var _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; -var Duplex = __nccwpck_require__(1359); -__nccwpck_require__(4124)(Transform, Duplex); -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - ts.writechunk = null; - ts.writecb = null; - if (data != null) - // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
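Module 21867, added a couple of hunks above, looks like the `safe-buffer` shim: on Node versions that already have `Buffer.from`/`Buffer.alloc` it simply re-exports the core `buffer` module, otherwise it provides safe equivalents. A small sketch, assuming the standalone `safe-buffer` package:

const {Buffer} = require('safe-buffer');

const a = Buffer.from('hello', 'utf8'); // never interprets a string argument as a size
const b = Buffer.alloc(8);              // zero-filled, unlike the legacy new Buffer(8)
const c = Buffer.allocUnsafe(8);        // uninitialized memory; the caller must overwrite it

console.log(a.toString('hex'), b.length, c.length); // 68656c6c6f 8 8

try {
  Buffer.from(42); // numbers are rejected, which is the point of the shim
} catch (err) {
  console.log(err instanceof TypeError); // true
}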
- this._readableState.sync = false; - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; + var cfg = { + callthrough: true, + calls: 1 } - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); -} -function prefinish() { - var _this = this; - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); - } + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; +module.exports = StreamEvents -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; + +/***/ }), + +/***/ 66121: +/***/ ((module) => { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } } -}; -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - }); -}; -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) - // single equals check for both `null` and `undefined` - stream.push(data); - // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); + return state.length } + /***/ }), -/***/ 2094: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 94841: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. @@ -38454,29782 +70081,37572 @@ function done(stream, er, data) { // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - - - -module.exports = Writable; - -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ - -/**/ -var Duplex; -/**/ -Writable.WritableState = WritableState; /**/ -var internalUtil = { - deprecate: __nccwpck_require__(7127) -}; -/**/ -/**/ -var Stream = __nccwpck_require__(2387); +var Buffer = (__nccwpck_require__(21867).Buffer); /**/ -var Buffer = (__nccwpck_require__(4300).Buffer); -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -var destroyImpl = __nccwpck_require__(7049); -var _require = __nccwpck_require__(9948), - getHighWaterMark = _require.getHighWaterMark; -var _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; -var errorOrDestroy = destroyImpl.errorOrDestroy; -__nccwpck_require__(4124)(Writable, Stream); -function nop() {} -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(1359); - options = options || {}; - - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); - - // if _final has been called - this.finalCalled = false; - - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // has it been destroyed - this.destroyed = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. 
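Module 79626 above appears to be `stream-events`: it stubs `_read`/`_write` once (via the `stubs` helper, module 1099) so a stream emits `reading`/`writing` the first time it is actually used. A sketch with Node's PassThrough, assuming the standalone `stream-events` package:

const {PassThrough} = require('stream');
const streamEvents = require('stream-events');

const stream = streamEvents(new PassThrough());

stream.once('writing', () => console.log('first write arrived'));
stream.once('reading', () => console.log('first read requested'));

stream.write('hello'); // _write fires 'writing' once, then the stub reverts
stream.resume();       // flowing mode calls _read, which fires 'reading' once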
- this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; - - // Should .destroy() be called after 'finish' (and potentially 'end') - this.autoDestroy = !!options.autoDestroy; - - // count buffered requests - this.bufferedRequestCount = 0; - - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; } - return out; }; -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. 
-var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; -} -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(1359); - - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - var isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); - - // legacy. - this.writable = true; - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; } - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); - // TODO: defer error events consistently everywhere, not just the cb - errorOrDestroy(stream, er); - process.nextTick(cb, er); + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. 
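Module 66121 above appears to be `stream-shift`, which pops exactly one buffered chunk off a readable stream by reading the internal buffer head's length instead of concatenating everything. A sketch, assuming the standalone `stream-shift` package:

const {PassThrough} = require('stream');
const shift = require('stream-shift');

const stream = new PassThrough();
stream.write('first');
stream.write('second');

// Each call returns one buffered write, not a concatenation of both.
console.log(String(shift(stream))); // 'first'
console.log(String(shift(stream))); // 'second'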
-function validChunk(stream, state, chunk, cb) { - var er; - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); - } - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; } - return true; + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); } -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; } - return ret; -}; -Writable.prototype.cork = function () { - this._writableState.corked++; + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; }; -Writable.prototype.uncork = function () { - var state = this._writableState; - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; }; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - return chunk; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; } -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; } -}); - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); + return nb; } - return ret; -} -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; + return 0; } -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; } -} -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; } - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } } } } -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; } -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. 
+function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); } } - if (entry === null) state.lastBufferedRequest = null; + return r; } - state.bufferedRequest = entry; - state.bufferProcessing = false; + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); } -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; -Writable.prototype._writev = null; -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); } + return r; +} - // ignore unnecessary end() calls. 
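The decoder above (module 94841) buffers the trailing bytes of an incomplete multi-byte character in `lastChar`/`lastNeed` instead of emitting replacement characters mid-sequence. Node's built-in `string_decoder` behaves the same way as this vendored copy, so a quick sketch using the built-in:

const {StringDecoder} = require('string_decoder');

const decoder = new StringDecoder('utf8');
const euro = Buffer.from([0xe2, 0x82, 0xac]); // '€' encoded as three UTF-8 bytes

// Fed one byte at a time, output only appears once the character is complete.
console.log(JSON.stringify(decoder.write(euro.slice(0, 1)))); // ""
console.log(JSON.stringify(decoder.write(euro.slice(1, 2)))); // ""
console.log(JSON.stringify(decoder.write(euro.slice(2, 3)))); // "€"
console.log(JSON.stringify(decoder.end()));                   // ""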
- if (!state.ending) endWritable(this, state, cb); - return this; -}; -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; } -}); -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; + return buf.toString('base64', i, buf.length - n); } -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - errorOrDestroy(stream, err); - } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; } -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); } -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - var rState = stream._readableState; - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} + +/***/ }), + +/***/ 14526: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} + + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } + + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); + + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; } - } } - } - return need; } -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; } -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; +module.exports = toNumber + + +/***/ }), + +/***/ 1099: +/***/ ((module) => { + +"use strict"; + + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} } - // reuse the free corkReq. 
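The `toNumber()` helper above (module 14526) looks like the `strnum` package, which fast-xml-parser uses to decide whether a string should be coerced to a number. A sketch of its defaults, assuming the standalone `strnum` package:

const toNumber = require('strnum');

console.log(toNumber('42'));                         // 42
console.log(toNumber('0x1F'));                       // 31     (hex parsing is on by default)
console.log(toNumber('007'));                        // 7      (leading zeros allowed by default)
console.log(toNumber('007', {leadingZeros: false})); // '007'  (returned untouched)
console.log(toNumber('1.50'));                       // 1.5
console.log(toNumber('1.5e3'));                      // 1500   (eNotation is on by default)
console.log(toNumber('not a number'));               // 'not a number'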
- state.corkedRequestsFree.next = corkReq; -} -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; + return returnVal } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; +} + /***/ }), -/***/ 3306: +/***/ 59318: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const os = __nccwpck_require__(22037); +const tty = __nccwpck_require__(76224); +const hasFlag = __nccwpck_require__(31621); -var _Object$setPrototypeO; -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } -var finished = __nccwpck_require__(6080); -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); -function createIterResult(value, done) { - return { - value: value, - done: done - }; +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; } -function readAndResolve(iter) { - var resolve = iter[kLastResolve]; - if (resolve !== null) { - var data = iter[kStream].read(); - // we defer if data is null - // we can be expecting either 'end' or - // 'error' - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); - } - } + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } } -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; } -function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - iter[kHandlePromise](resolve, reject); - }, reject); - }; + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; } -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { - get stream() { - return this[kStream]; - }, - next: function next() { - var _this = this; - // if we have detected an error in the meanwhile - // reject straight away - var error = this[kError]; - if (error !== null) { - return Promise.reject(error); - } - if (this[kEnded]) { - return Promise.resolve(createIterResult(undefined, true)); - } - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. - return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); - } - // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - var lastPromise = this[kLastPromise]; - var promise; - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); - } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - var data = this[kStream].read(); - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - promise = new Promise(this[kHandlePromise]); - } - this[kLastPromise] = promise; - return promise; - } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } - resolve(createIterResult(undefined, true)); - }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true - }), 
_Object$create)); - iterator[kLastPromise] = null; - finished(stream, function (err) { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; - // reject if we are waiting for data in the Promise - // returned by next() and store the error - if (reject !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - iterator[kError] = err; - return; - } - var resolve = iterator[kLastResolve]; - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); - } - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) }; -module.exports = createReadableStreamAsyncIterator; + /***/ }), -/***/ 6522: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 14920: +/***/ ((__unused_webpack_module, exports) => { "use strict"; - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -var _require = __nccwpck_require__(4300), - Buffer = _require.Buffer; -var _require2 = __nccwpck_require__(3837), - inspect = _require2.inspect; -var custom = inspect && inspect.custom || 'inspect'; -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} -module.exports = /*#__PURE__*/function () { - function BufferList() { - _classCallCheck(this, BufferList); - this.head = null; - this.tail = null; - this.length = 0; - } - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) ret += s + p.data; - return ret; +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. 
+ * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); } - - // Consumes a specified amount of bytes or characters from the buffered data. - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - return ret; + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); } - }, { - key: "first", - value: function first() { - return this.head.data; + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; } - - // Consumes a specified amount of characters from the buffered data. - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - this.length -= c; - return ret; + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; } - - // Consumes a specified amount of bytes from the buffered data. - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? 
buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - break; + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); } - ++c; - } - this.length -= c; - return ret; } - - // Make sure the linked list only shows the minimal necessary information. - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread(_objectSpread({}, options), {}, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; } - }]); - return BufferList; -}(); + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. 
+ * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map /***/ }), -/***/ 7049: -/***/ ((module) => { +/***/ 60446: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); - } +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(13685); +const https_1 = __nccwpck_require__(95687); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(57310); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; } - return this; - } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? __nccwpck_require__(23764) + : __nccwpck_require__(66202); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 
'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks +/***/ }), - if (this._readableState) { - this._readableState.destroyed = true; - } +/***/ 6886: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); - } else { - process.nextTick(emitCloseNT, _this); - } - } else if (cb) { - process.nextTick(emitCloseNT, _this); - cb(err); - } else { - process.nextTick(emitCloseNT, _this); - } - }); - return this; -} -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); +"use strict"; + +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(80467); +const stream_1 = __nccwpck_require__(12781); +const uuid = __nccwpck_require__(75840); +const agents_1 = __nccwpck_require__(60446); +const TeenyStatistics_1 = __nccwpck_require__(14920); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(79626); +class RequestError extends Error { } -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(63477); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; } -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; } -function emitErrorNT(self, err) { - self.emit('error', err); +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; } -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. - - var rState = stream._readableState; - var wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + 
requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); + }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; } -module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); }; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6080: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 76503: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). - - -var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(7214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE); -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - callback.apply(this, args); - }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(82361); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const promisify_1 = __importDefault(__nccwpck_require__(55947)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; } -function noop() {} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); } -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - var onlegacyfinish = function onlegacyfinish() { - if (!stream.writable) onfinish(); - }; - var writableEnded = stream._writableState && stream._writableState.finished; - var onfinish = function onfinish() { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - var readableEnded = stream._readableState && stream._readableState.endEmitted; - var onend = function onend() { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - var onerror = function onerror(err) { - callback.call(stream, err); - }; - var onclose = function onclose() { - var err; - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - var onrequest = function onrequest() { - stream.req.on('finish', onfinish); - }; - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); } -module.exports = eos; +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 9082: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 55947: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } -function ownKeys(object, enumerableOnly) 
{ var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } -function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } -function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE); -function from(Readable, iterable, opts) { - var iterator; - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - var readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); - // Reading boolean to protect against _read - // being called before last iteration completion. - var reading = false; - readable._read = function () { - if (!reading) { - reading = true; - next(); +/***/ }), + +/***/ 98455: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(41808)); +const tls_1 = __importDefault(__nccwpck_require__(24404)); +const url_1 = __importDefault(__nccwpck_require__(57310)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const agent_base_1 = __nccwpck_require__(76503); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(94037)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. 
+ let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); } - }; - function next() { - return _next2.apply(this, arguments); - } - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - var _yield$iterator$next = yield iterator.next(), - value = _yield$iterator$next.value, - done = _yield$iterator$next.done; - if (done) { - readable.push(null); - } else if (readable.push(yield value)) { - next(); - } else { - reading = false; +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); - } - return readable; + } + return ret; } -module.exports = from; - +//# sourceMappingURL=agent.js.map /***/ }), -/***/ 6989: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 66202: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(98455)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map +/***/ }), -var eos; -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - callback.apply(void 0, arguments); - }; -} -var _require$codes = (__nccwpck_require__(7214)/* .codes */ .q), - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - var closed = false; - stream.on('close', function () { - closed = true; - }); - if (eos === undefined) eos = __nccwpck_require__(6080); - eos(stream, { - readable: reading, - writable: writing - }, function (err) { - if (err) return callback(err); - closed = true; - callback(); - }); - var destroyed = false; - return function (err) { - if (closed) return; - if (destroyed) return; - destroyed = true; +/***/ 94037: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - // request.destroy just do .end - .abort is what we want - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; -} -function call(fn) { - fn(); -} -function pipe(from, to) { - return from.pipe(to); -} -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); -} -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; - } - var callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); - } - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); }); - }); - return streams.reduce(pipe); } -module.exports = pipeline; +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map /***/ }), -/***/ 9948: +/***/ 84256: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(7214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE); -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} -function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); - } - return Math.floor(hwm); - } +var punycode = __nccwpck_require__(85477); +var mappingTable = __nccwpck_require__(72020); - // Default value - return state.objectMode ? 
16 : 16 * 1024; -} -module.exports = { - getHighWaterMark: getHighWaterMark +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 }; -/***/ }), - -/***/ 2387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(2781); +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; -/***/ }), + while (start <= end) { + var mid = Math.floor((start + end) / 2); -/***/ 1642: -/***/ ((module, exports, __nccwpck_require__) => { + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } -var Stream = __nccwpck_require__(2781); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(1433); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(2094); - exports.Duplex = __nccwpck_require__(1359); - exports.Transform = __nccwpck_require__(4415); - exports.PassThrough = __nccwpck_require__(1542); - exports.finished = __nccwpck_require__(6080); - exports.pipeline = __nccwpck_require__(6989); + return null; } +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; -/***/ }), +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} -/***/ 3515: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; -"use strict"; + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } -const {PassThrough} = __nccwpck_require__(2781); -const extend = __nccwpck_require__(8171); + processed += String.fromCodePoint(codePoint); + break; + } + } -let debug = () => {}; -if ( - typeof process !== 'undefined' && - 'env' in process && - typeof process.env === 'object' && - process.env.DEBUG === 'retry-request' -) { - debug = message => { - console.log('retry-request:', message); + return { + string: processed, + error: hasError }; } -const DEFAULTS = { - objectMode: false, - retries: 2, +var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; - /* - The maximum time to delay in seconds. If retryDelayMultiplier results in a - delay greater than maxRetryDelay, retries should delay by maxRetryDelay - seconds instead. - */ - maxRetryDelay: 64, +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } - /* - The multiplier by which to increase the delay time between the completion of - failed requests, and the initiation of the subsequent retrying request. - */ - retryDelayMultiplier: 2, + var error = false; - /* - The length of time to keep retrying in seconds. The last sleep period will - be shortened as necessary, so that the last retry runs at deadline (and not - considerably beyond it). 
The total time starting from when the initial - request is sent, after which an error will be returned, regardless of the - retrying attempts made meanwhile. - */ - totalTimeout: 600, + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } - noResponseRetries: 2, - currentRetryAttempt: 0, - shouldRetryFn: function (response) { - const retryRanges = [ - // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes - // 1xx - Retry (Informational, request still processing) - // 2xx - Do not retry (Success) - // 3xx - Do not retry (Redirect) - // 4xx - Do not retry (Client errors) - // 429 - Retry ("Too Many Requests") - // 5xx - Retry (Server errors) - [100, 199], - [429, 429], - [500, 599], - ]; + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } - const statusCode = response.statusCode; - debug(`Response status: ${statusCode}`); + return { + label: label, + error: error + }; +} - let range; - while ((range = retryRanges.shift())) { - if (statusCode >= range[0] && statusCode <= range[1]) { - // Not a successful status or redirect. - return true; - } - } - }, -}; +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); -function retryRequest(requestOpts, opts, callback) { - if (typeof requestOpts === 'string') { - requestOpts = {url: requestOpts}; + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } } - const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + return { + string: labels.join("."), + error: result.error + }; +} - if (typeof opts === 'function') { - callback = opts; - } +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); - const manualCurrentRetryAttemptWasSet = - opts && typeof opts.currentRetryAttempt === 'number'; - opts = extend({}, DEFAULTS, opts); + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } - if (typeof opts.request === 'undefined') { - throw new Error('A request library must be provided to retry-request.'); + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } } - let currentRetryAttempt = opts.currentRetryAttempt; - - let numNoResponseAttempts = 0; - let streamResponseHandled = false; + if (result.error) return null; + return labels.join("."); +}; - let retryStream; - let requestStream; - let delayStream; +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = 
processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); - let activeRequest; - const retryRequest = { - abort: function () { - if (activeRequest && activeRequest.abort) { - activeRequest.abort(); - } - }, + return { + domain: result.string, + error: result.error }; +}; - if (streamMode) { - retryStream = new PassThrough({objectMode: opts.objectMode}); - retryStream.abort = resetStreams; - } - - const timeOfFirstRequest = Date.now(); - if (currentRetryAttempt > 0) { - retryAfterDelay(currentRetryAttempt); - } else { - makeRequest(); - } +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; - if (streamMode) { - return retryStream; - } else { - return retryRequest; - } - function resetStreams() { - delayStream = null; +/***/ }), - if (requestStream) { - requestStream.abort && requestStream.abort(); - requestStream.cancel && requestStream.cancel(); +/***/ 4351: +/***/ ((module) => { - if (requestStream.destroy) { - requestStream.destroy(); - } else if (requestStream.end) { - requestStream.end(); - } +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); } - } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; - function makeRequest() { - let finishHandled = false; - currentRetryAttempt++; - debug(`Current retry attempt: ${currentRetryAttempt}`); + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; - function handleFinish(args = []) { - if (!finishHandled) { - finishHandled = true; - retryStream.emit('complete', ...args); - } - } + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; - if (streamMode) { - streamResponseHandled = false; + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; - delayStream = new PassThrough({objectMode: opts.objectMode}); - requestStream = opts.request(requestOpts); + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; - setImmediate(() => { - retryStream.emit('request'); - }); + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; - requestStream - // gRPC via google-cloud-node can emit an `error` as well as a `response` - // Whichever it emits, we run with-- we can't run with both. That's what - // is up with the `streamResponseHandled` tracking. - .on('error', err => { - if (streamResponseHandled) { - return; - } + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; - streamResponseHandled = true; - onResponse(err); - }) - .on('response', (resp, body) => { - if (streamResponseHandled) { - return; - } + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; - streamResponseHandled = true; - onResponse(null, resp, body); - }) - .on('complete', (...params) => handleFinish(params)) - .on('finish', (...params) => handleFinish(params)); + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; - requestStream.pipe(delayStream); - } else { - activeRequest = opts.request(requestOpts, onResponse); - } - } + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; - function retryAfterDelay(currentRetryAttempt) { - if (streamMode) { - resetStreams(); - } + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; - const nextRetryDelay = getNextRetryDelay({ - maxRetryDelay: opts.maxRetryDelay, - retryDelayMultiplier: opts.retryDelayMultiplier, - retryNumber: currentRetryAttempt, - timeOfFirstRequest, - totalTimeout: opts.totalTimeout, + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; }); - debug(`Next retry delay: ${nextRetryDelay}`); - if (nextRetryDelay <= 0) { - numNoResponseAttempts = opts.noResponseRetries + 1; - return; - } + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; - setTimeout(makeRequest, nextRetryDelay); - } + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; - function onResponse(err, response, body) { - // An error such as DNS resolution. 
- if (err) { - numNoResponseAttempts++; + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; - if (numNoResponseAttempts <= opts.noResponseRetries) { - retryAfterDelay(numNoResponseAttempts); - } else { - if (streamMode) { - retryStream.emit('error', err); - retryStream.end(); - } else { - callback(err, response, body); + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } } - } + return to.concat(ar || Array.prototype.slice.call(from)); + }; - return; - } + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; - // Send the response to see if we should try again. - // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts - // the very first request sent as the first "retry". It is only accurate - // when a user provides their own "currentRetryAttempt" option at - // instantiation. - const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet - ? currentRetryAttempt - : currentRetryAttempt - 1; - if ( - adjustedCurrentRetryAttempt < opts.retries && - opts.shouldRetryFn(response) - ) { - retryAfterDelay(currentRetryAttempt); - return; - } + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; - // No more attempts need to be made, just continue on. - if (streamMode) { - retryStream.emit('response', response); - delayStream.pipe(retryStream); - requestStream.on('error', err => { - retryStream.destroy(err); - }); - } else { - callback(err, response, body); - } - } -} + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; -module.exports = retryRequest; + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; -function getNextRetryDelay(config) { - const { - maxRetryDelay, - retryDelayMultiplier, - retryNumber, - timeOfFirstRequest, - totalTimeout, - } = config; + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; - const maxRetryDelayMs = maxRetryDelay * 1000; - const totalTimeoutMs = totalTimeout * 1000; + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; - const jitter = Math.floor(Math.random() * 1000); - const calculatedNextRetryDelay = - Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; - const maxAllowableDelayMs = - totalTimeoutMs - (Date.now() - timeOfFirstRequest); + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; - return Math.min( - calculatedNextRetryDelay, - maxAllowableDelayMs, - maxRetryDelayMs - ); -} + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; -module.exports.defaults = DEFAULTS; -module.exports.getNextRetryDelay = getNextRetryDelay; + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? 
receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); /***/ }), -/***/ 1604: +/***/ 74294: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(6244); +module.exports = __nccwpck_require__(54219); + /***/ }), -/***/ 6244: +/***/ 54219: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var RetryOperation = __nccwpck_require__(5369); - -exports.operation = function(options) { - var timeouts = exports.timeouts(options); - return new RetryOperation(timeouts, { - forever: options && (options.forever || options.retries === Infinity), - unref: options && 
options.unref, - maxRetryTime: options && options.maxRetryTime - }); -}; +"use strict"; -exports.timeouts = function(options) { - if (options instanceof Array) { - return [].concat(options); - } - var opts = { - retries: 10, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: Infinity, - randomize: false - }; - for (var key in options) { - opts[key] = options[key]; - } +var net = __nccwpck_require__(41808); +var tls = __nccwpck_require__(24404); +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var events = __nccwpck_require__(82361); +var assert = __nccwpck_require__(39491); +var util = __nccwpck_require__(73837); - if (opts.minTimeout > opts.maxTimeout) { - throw new Error('minTimeout is greater than maxTimeout'); - } - var timeouts = []; - for (var i = 0; i < opts.retries; i++) { - timeouts.push(this.createTimeout(i, opts)); - } +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; - if (options && options.forever && !timeouts.length) { - timeouts.push(this.createTimeout(i, opts)); - } - // sort the array numerically ascending - timeouts.sort(function(a,b) { - return a - b; - }); +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} - return timeouts; -}; +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} -exports.createTimeout = function(attempt, opts) { - var random = (opts.randomize) - ? (Math.random() + 1) - : 1; +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} - var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); - timeout = Math.min(timeout, opts.maxTimeout); +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} - return timeout; -}; -exports.wrap = function(obj, options, methods) { - if (options instanceof Array) { - methods = options; - options = null; - } +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; - if (!methods) { - methods = []; - for (var key in obj) { - if (typeof obj[key] === 'function') { - methods.push(key); + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. 
+ self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; } } - } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); - for (var i = 0; i < methods.length; i++) { - var method = methods[i]; - var original = obj[method]; +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - obj[method] = function retryWrapper(original) { - var op = exports.operation(options); - var args = Array.prototype.slice.call(arguments, 1); - var callback = args.pop(); + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } - args.push(function(err) { - if (op.retry(err)) { - return; - } - if (err) { - arguments[0] = op.mainError(); - } - callback.apply(this, arguments); - }); + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); - op.attempt(function() { - original.apply(obj, args); - }); - }.bind(obj, original); - obj[method].options = options; - } + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); }; +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); -/***/ }), + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } -/***/ 5369: -/***/ ((module) => { + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); -function RetryOperation(timeouts, options) { - // Compatibility for the old (timeouts, retryForever) signature - if (typeof options === 'boolean') { - options = { forever: options }; + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; } - this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); - this._timeouts = timeouts; - this._options = options || {}; - this._maxRetryTime = options && options.maxRetryTime || Infinity; - this._fn = null; - this._errors = []; - this._attempts = 1; - this._operationTimeout = null; - this._operationTimeoutCb = null; - this._timeout = null; - this._operationStart = null; - this._timer = null; - - if (this._options.forever) { - this._cachedTimeouts = this._timeouts.slice(0); + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); } -} -module.exports = RetryOperation; -RetryOperation.prototype.reset = function() { - this._attempts = 1; - this._timeouts = this._originalTimeouts.slice(0); -} + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -RetryOperation.prototype.stop = function() { - if (this._timeout) { - clearTimeout(this._timeout); - } - if (this._timer) { - clearTimeout(this._timer); + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); } - this._timeouts = []; - this._cachedTimeouts = null; -}; + function onError(cause) { + connectReq.removeAllListeners(); -RetryOperation.prototype.retry = function(err) { - if (this._timeout) { - clearTimeout(this._timeout); + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); } +}; - if (!err) { - return false; +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; } - var currentTime = new Date().getTime(); - if (err && currentTime - this._operationStart >= this._maxRetryTime) { - this._errors.push(err); - this._errors.unshift(new Error('RetryOperation timeout occurred')); - return false; + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); } +}; - this._errors.push(err); +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); - var timeout = this._timeouts.shift(); - if (timeout === undefined) { - if (this._cachedTimeouts) { - // retry forever, only keep last error - this._errors.splice(0, this._errors.length - 1); - timeout = this._cachedTimeouts.slice(-1); - } else { - return false; - } + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } + return host; // for v0.11 or later +} - var self = this; - this._timer = setTimeout(function() { - self._attempts++; - - if (self._operationTimeoutCb) { - self._timeout = setTimeout(function() { - self._operationTimeoutCb(self._attempts); - }, self._operationTimeout); - - if (self._options.unref) { - self._timeout.unref(); +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } } } - - self._fn(self._attempts); - }, timeout); - - if (this._options.unref) { - this._timer.unref(); } + return target; +} - return true; -}; - -RetryOperation.prototype.attempt = function(fn, timeoutOps) { - this._fn = fn; - if (timeoutOps) { - if (timeoutOps.timeout) { - this._operationTimeout = timeoutOps.timeout; - } - if (timeoutOps.cb) { - this._operationTimeoutCb = timeoutOps.cb; +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); } + console.error.apply(console, args); } +} else { + debug = function() {}; +} +exports.debug = debug; // for test - var self = this; - if (this._operationTimeoutCb) { - this._timeout = setTimeout(function() { - self._operationTimeoutCb(); - }, self._operationTimeout); - } - - this._operationStart = new Date().getTime(); - this._fn(this._attempts); -}; +/***/ }), -RetryOperation.prototype.try = function(fn) { - console.log('Using RetryOperation.try() is deprecated'); - this.attempt(fn); -}; +/***/ 41773: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -RetryOperation.prototype.start = function(fn) { - console.log('Using RetryOperation.start() is deprecated'); - this.attempt(fn); -}; +"use strict"; -RetryOperation.prototype.start = RetryOperation.prototype.try; -RetryOperation.prototype.errors = function() { - return this._errors; -}; +const Client = __nccwpck_require__(33598) +const Dispatcher = __nccwpck_require__(60412) +const errors = __nccwpck_require__(48045) +const Pool = __nccwpck_require__(4634) +const BalancedPool = __nccwpck_require__(37931) +const Agent = __nccwpck_require__(7890) +const util = __nccwpck_require__(83983) +const { InvalidArgumentError } = errors +const api = __nccwpck_require__(44059) +const buildConnector = __nccwpck_require__(82067) +const MockClient = __nccwpck_require__(58687) +const MockAgent = __nccwpck_require__(66771) +const MockPool = __nccwpck_require__(26193) +const mockErrors = __nccwpck_require__(50888) +const ProxyAgent = __nccwpck_require__(97858) +const 
RetryHandler = __nccwpck_require__(82286) +const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(21892) +const DecoratorHandler = __nccwpck_require__(46930) +const RedirectHandler = __nccwpck_require__(72860) +const createRedirectInterceptor = __nccwpck_require__(38861) -RetryOperation.prototype.attempts = function() { - return this._attempts; -}; +let hasCrypto +try { + __nccwpck_require__(6113) + hasCrypto = true +} catch { + hasCrypto = false +} -RetryOperation.prototype.mainError = function() { - if (this._errors.length === 0) { - return null; - } +Object.assign(Dispatcher.prototype, api) - var counts = {}; - var mainError = null; - var mainErrorCount = 0; +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler - for (var i = 0; i < this._errors.length; i++) { - var error = this._errors[i]; - var message = error.message; - var count = (counts[message] || 0) + 1; +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor - counts[message] = count; +module.exports.buildConnector = buildConnector +module.exports.errors = errors - if (count >= mainErrorCount) { - mainError = error; - mainErrorCount = count; +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null } - } - - return mainError; -}; + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') + } -/***/ }), - -/***/ 1867: -/***/ ((module, exports, __nccwpck_require__) => { - -/*! safe-buffer. MIT License. Feross Aboukhadijeh */ -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(4300) -var Buffer = buffer.Buffer + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } -SafeBuffer.prototype = Object.create(Buffer.prototype) + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? 
url : {} + } -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) + url = util.parseURL(url) + } -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) -} + const { agent, dispatcher = getGlobalDispatcher() } = opts -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') } - } else { - buf.fill(0) - } - return buf -} - -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 'PUT' : 'GET') + }, handler) } - return buffer.SlowBuffer(size) } +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher -/***/ }), - -/***/ 9626: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = (__nccwpck_require__(74881).fetch) + } + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } -var stubs = __nccwpck_require__(1099) + throw err + } + } + module.exports.Headers = __nccwpck_require__(10554).Headers + module.exports.Response = __nccwpck_require__(27823).Response + module.exports.Request = __nccwpck_require__(48359).Request + module.exports.FormData = __nccwpck_require__(72015).FormData + module.exports.File = __nccwpck_require__(78511).File + module.exports.FileReader = __nccwpck_require__(1446).FileReader -/* - * StreamEvents can be used 2 ways: - * - * 1: - * function MyStream() { - * require('stream-events').call(this) - * } - * - * 2: - * require('stream-events')(myStream) - */ -function StreamEvents(stream) { - stream = stream || this + const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(71246) - var cfg = { - callthrough: true, - calls: 1 - } + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin - stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) - stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + const { CacheStorage } = __nccwpck_require__(37907) + const { kConstruct } = __nccwpck_require__(29174) - return stream + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
+ module.exports.caches = new CacheStorage(kConstruct) } -module.exports = StreamEvents +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(41724) + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie -/***/ }), + const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -/***/ 6121: -/***/ ((module) => { + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType +} -module.exports = shift +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = __nccwpck_require__(54284) -function shift (stream) { - var rs = stream._readableState - if (!rs) return null - return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) + module.exports.WebSocket = WebSocket } -function getStateLength (state) { - if (state.buffer.length) { - var idx = state.bufferIndex || 0 - // Since node 6.3.0 state.buffer is a BufferList not an array - if (state.buffer.head) { - return state.buffer.head.data.length - } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { - return state.buffer[idx].length - } - } +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) - return state.length -} +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors /***/ }), -/***/ 4841: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7890: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+const { InvalidArgumentError } = __nccwpck_require__(48045) +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(72785) +const DispatcherBase = __nccwpck_require__(74839) +const Pool = __nccwpck_require__(4634) +const Client = __nccwpck_require__(33598) +const util = __nccwpck_require__(83983) +const createRedirectInterceptor = __nccwpck_require__(38861) +const { WeakRef, FinalizationRegistry } = __nccwpck_require__(56436)() -/**/ +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') -var Buffer = (__nccwpck_require__(1867).Buffer); -/**/ +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') } - } -}; -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
-exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } -StringDecoder.prototype.end = utf8End; + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) + } + }) -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; + const agent = this -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; -} + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. 
-function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } } - return nb; + return ret } - return 0; -} -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + } + + const ref = this[kClients].get(key) + + let dispatcher = ref ? ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) + } + + return dispatcher.dispatch(opts, handler) } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) + } } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; + + await Promise.all(closePromises) + } + + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) } } + + await Promise.all(destroyPromises) } } -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. 
-function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); +module.exports = Agent + + +/***/ }), + +/***/ 7032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { addAbortListener } = __nccwpck_require__(83983) +const { RequestAbortedError } = __nccwpck_require__(48045) + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; } -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} + if (!signal) { + return + } -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; + if (signal.aborted) { + abort(self) + return } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? 
this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); + self[kSignal] = signal + self[kListener] = () => { + abort(self) } - return r; + + addAbortListener(self[kSignal], self[kListener]) } -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; +function removeSignal (self) { + if (!self[kSignal]) { + return + } + + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; + self[kSignal].removeListener('abort', self[kListener]) } - return buf.toString('base64', i, buf.length - n); -} -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; + self[kSignal] = null + self[kListener] = null } -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); +module.exports = { + addSignal, + removeSignal } -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} /***/ }), -/***/ 4526: -/***/ ((module) => { +/***/ 29744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; -const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; -// const octRegex = /0x[a-z0-9]+/; -// const binRegex = /0x[a-z0-9]+/; +"use strict"; -//polyfill -if (!Number.parseInt && window.parseInt) { - Number.parseInt = window.parseInt; -} -if (!Number.parseFloat && window.parseFloat) { - Number.parseFloat = window.parseFloat; -} +const { AsyncResource } = __nccwpck_require__(50852) +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) - -const consider = { - hex : true, - leadingZeros: true, - decimalPoint: "\.", - eNotation: true - //skipLike: /regex/ -}; +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } -function toNumber(str, options = {}){ - // const options = Object.assign({}, consider); - // if(opt.leadingZeros === false){ - // options.leadingZeros = false; - // }else if(opt.hex === false){ - // options.hex = false; - // } + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - options = Object.assign({}, consider, options ); - if(!str || typeof str !== "string" ) return str; - - let trimmedStr = str.trim(); - // if(trimmedStr === "0.0") return 0; - // else if(trimmedStr === "+0.0") return 0; - // else if(trimmedStr === "-0.0") return -0; + const { signal, opaque, responseHeaders } = opts - if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; - else if (options.hex && hexRegex.test(trimmedStr)) { - return Number.parseInt(trimmedStr, 16); - // } else if (options.parseOct && octRegex.test(str)) { - // return Number.parseInt(val, 8); - // }else if (options.parseBin && binRegex.test(str)) { - // return Number.parseInt(val, 2); - }else{ - //separate negative sign, leading zeros, 
and rest number - const match = numRegex.exec(trimmedStr); - if(match){ - const sign = match[1]; - const leadingZeros = match[2]; - let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros - //trim ending zeros for floating number - - const eNotation = match[4] || match[6]; - if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 - else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 - else{//no leading zeros or leading zeros are allowed - const num = Number(trimmedStr); - const numStr = "" + num; - if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation - if(options.eNotation) return num; - else return str; - }else if(eNotation){ //given number has enotation - if(options.eNotation) return num; - else return str; - }else if(trimmedStr.indexOf(".") !== -1){ //floating number - // const decimalPart = match[5].substr(1); - // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } - - // const p = numStr.indexOf("."); - // const givenIntPart = numStr.substr(0,p); - // const givenDecPart = numStr.substr(p+1); - if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 - else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 - else if( sign && numStr === "-"+numTrimmedByZeros) return num; - else return str; - } - - if(leadingZeros){ - // if(numTrimmedByZeros === numStr){ - // if(options.leadingZeros) return num; - // else return str; - // }else return str; - if(numTrimmedByZeros === numStr) return num; - else if(sign+numTrimmedByZeros === numStr) return num; - else return str; - } + super('UNDICI_CONNECT') - if(trimmedStr === numStr) return num; - else if(trimmedStr === sign+numStr) return num; - // else{ - // //number with +/- sign - // trimmedStr.test(/[-+][0-9]); + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null - // } - return str; - } - // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; - - }else{ //non-numeric string - return str; - } + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders () { + throw new SocketError('bad connect', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + removeSignal(this) + + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } + } } -/** - * - * @param {string} numStr without leading zeros - * @returns - */ -function trimZeros(numStr){ - if(numStr && numStr.indexOf(".") !== -1){//float - numStr = numStr.replace(/0+$/, ""); //remove ending zeros - if(numStr === ".") numStr = "0"; - else if(numStr[0] === ".") numStr = "0"+numStr; - else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); - return numStr; +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err } - return numStr; + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } } -module.exports = toNumber + +module.exports = connect /***/ }), -/***/ 1099: -/***/ ((module) => { +/***/ 28752: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = function stubs(obj, method, cfg, stub) { - if (!obj || !method || !obj[method]) - throw new Error('You must provide an object and a key for an existing method') +const { + Readable, + Duplex, + PassThrough +} = __nccwpck_require__(12781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { AsyncResource } = __nccwpck_require__(50852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(39491) - if (!stub) { - stub = cfg - cfg = {} - } +const kResume = Symbol('resume') - stub = stub || function() {} +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) - cfg.callthrough = cfg.callthrough || false - cfg.calls = cfg.calls || 0 + this[kResume] = null + } - var norevert = cfg.calls === 0 + _read () { + const { [kResume]: resume } = this - var cached = obj[method].bind(obj) + if (resume) { + this[kResume] = null + resume() + } + } - obj[method] = function() { - var args = [].slice.call(arguments) - var returnVal + _destroy (err, callback) { + this._read() - if (cfg.callthrough) - returnVal = cached.apply(obj, args) + callback(err) + } +} - returnVal = stub.apply(obj, args) || returnVal +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume + } - if (!norevert && --cfg.calls === 0) - obj[method] = cached + _read () { + this[kResume]() + } - return returnVal + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + callback(err) } } +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') + } + + const { signal, method, opaque, onInfo, responseHeaders } 
= opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_PIPELINE') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null + + this.req = new PipelineRequest().on('error', util.nop) + + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this + + if (body && body.resume) { + body.resume() + } + }, + write: (chunk, encoding, callback) => { + const { req } = this -/***/ }), + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this -/***/ 9318: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } -"use strict"; + if (abort && err) { + abort() + } -const os = __nccwpck_require__(2037); -const tty = __nccwpck_require__(6224); -const hasFlag = __nccwpck_require__(1621); + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) -const {env} = process; + removeSignal(this) -let forceColor; -if (hasFlag('no-color') || - hasFlag('no-colors') || - hasFlag('color=false') || - hasFlag('color=never')) { - forceColor = 0; -} else if (hasFlag('color') || - hasFlag('colors') || - hasFlag('color=true') || - hasFlag('color=always')) { - forceColor = 1; -} + callback(err) + } + }).on('prefinish', () => { + const { req } = this -if ('FORCE_COLOR' in env) { - if (env.FORCE_COLOR === 'true') { - forceColor = 1; - } else if (env.FORCE_COLOR === 'false') { - forceColor = 0; - } else { - forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); - } -} + // Node < 15 does not call _final in same tick. + req.push(null) + }) -function translateLevel(level) { - if (level === 0) { - return false; - } + this.res = null - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; -} + addSignal(this, signal) + } -function supportsColor(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } + onConnect (abort, context) { + const { ret, res } = this - if (hasFlag('color=16m') || - hasFlag('color=full') || - hasFlag('color=truecolor')) { - return 3; - } + assert(!res, 'pipeline cannot be retried') - if (hasFlag('color=256')) { - return 2; - } + if (ret.destroyed) { + throw new RequestAbortedError() + } - if (haveStream && !streamIsTTY && forceColor === undefined) { - return 0; - } + this.abort = abort + this.context = context + } - const min = forceColor || 0; + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this - if (env.TERM === 'dumb') { - return min; - } + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return + } - if (process.platform === 'win32') { - // Windows 10 build 10586 is the first Windows release that supports 256 colors. 
- // Windows 10 build 14931 is the first release that supports 16m/TrueColor. - const osRelease = os.release().split('.'); - if ( - Number(osRelease[0]) >= 10 && - Number(osRelease[2]) >= 10586 - ) { - return Number(osRelease[2]) >= 14931 ? 3 : 2; - } + this.res = new PipelineResponse(resume) - return 1; - } + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err + } - if ('CI' in env) { - if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { - return 1; - } + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') + } - return min; - } + body + .on('data', (chunk) => { + const { ret, body } = this - if ('TEAMCITY_VERSION' in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; - } + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this - if (env.COLORTERM === 'truecolor') { - return 3; - } + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this - if ('TERM_PROGRAM' in env) { - const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + ret.push(null) + }) + .on('close', () => { + const { ret } = this - switch (env.TERM_PROGRAM) { - case 'iTerm.app': - return version >= 3 ? 3 : 2; - case 'Apple_Terminal': - return 2; - // No default - } - } + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } + this.body = body + } - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } + onData (chunk) { + const { res } = this + return res.push(chunk) + } - if ('COLORTERM' in env) { - return 1; - } + onComplete (trailers) { + const { res } = this + res.push(null) + } - return min; + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) + } } -function getSupportLevel(stream) { - const level = supportsColor(stream, stream && stream.isTTY); - return translateLevel(level); +function pipeline (opts, handler) { + try { + const pipelineHandler = new PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } } -module.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel(supportsColor(true, tty.isatty(1))), - stderr: translateLevel(supportsColor(true, tty.isatty(2))) -}; +module.exports = pipeline /***/ }), -/***/ 4920: -/***/ ((__unused_webpack_module, exports) => { +/***/ 55448: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/** - * @license - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; -/** - * @class TeenyStatisticsWarning - * @extends Error - * @description While an error, is used for emitting warnings when - * meeting certain configured thresholds. - * @see process.emitWarning - */ -class TeenyStatisticsWarning extends Error { - /** - * @param {string} message - */ - constructor(message) { - super(message); - this.threshold = 0; - this.type = ''; - this.value = 0; - this.name = this.constructor.name; - Error.captureStackTrace(this, this.constructor); - } -} -exports.TeenyStatisticsWarning = TeenyStatisticsWarning; -TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; -/** - * @class TeenyStatistics - * @description Maintain various statistics internal to teeny-request. Tracking - * is not automatic and must be instrumented within teeny-request. - */ -class TeenyStatistics { - /** - * @param {TeenyStatisticsOptions} [opts] - */ - constructor(opts) { - /** - * @type {number} - * @private - * @default 0 - */ - this._concurrentRequests = 0; - /** - * @type {boolean} - * @private - * @default false - */ - this._didConcurrentRequestWarn = false; - this._options = TeenyStatistics._prepareOptions(opts); - } - /** - * Returns a copy of the current options. - * @return {TeenyStatisticsOptions} - */ - getOptions() { - return Object.assign({}, this._options); - } - /** - * Change configured statistics options. This will not preserve unspecified - * options that were previously specified, i.e. this is a reset of options. - * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsConfig} The previous options. - * @see _prepareOptions - */ - setOptions(opts) { - const oldOpts = this._options; - this._options = TeenyStatistics._prepareOptions(opts); - return oldOpts; - } - /** - * @readonly - * @return {TeenyStatisticsCounters} - */ - get counters() { - return { - concurrentRequests: this._concurrentRequests, - }; - } - /** - * @description Should call this right before making a request. - */ - requestStarting() { - this._concurrentRequests++; - if (this._options.concurrentRequests > 0 && - this._concurrentRequests >= this._options.concurrentRequests && - !this._didConcurrentRequestWarn) { - this._didConcurrentRequestWarn = true; - const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + - this._concurrentRequests + - ' requests in-flight, which exceeds the configured threshold of ' + - this._options.concurrentRequests + - '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + - 'variable or the concurrentRequests option of teeny-request to ' + - 'increase or disable (0) this warning.'); - warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; - warning.value = this._concurrentRequests; - warning.threshold = this._options.concurrentRequests; - process.emitWarning(warning); - } - } - /** - * @description When using `requestStarting`, call this after the request - * has finished. - */ - requestFinished() { - // TODO negative? - this._concurrentRequests--; - } - /** - * Configuration Precedence: - * 1. Dependency inversion via defined option. - * 2. Global numeric environment variable. - * 3. Built-in default. - * This will not preserve unspecified options previously specified. 
- * @param {TeenyStatisticsOptions} [opts] - * @returns {TeenyStatisticsOptions} - * @private - */ - static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { - let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; - const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); - if (diConcurrentRequests !== undefined) { - concurrentRequests = diConcurrentRequests; - } - else if (!Number.isNaN(envConcurrentRequests)) { - concurrentRequests = envConcurrentRequests; - } - return { concurrentRequests }; + +const Readable = __nccwpck_require__(73858) +const { + InvalidArgumentError, + RequestAbortedError +} = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(77474) +const { AsyncResource } = __nccwpck_require__(50852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) + +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } -} -exports.TeenyStatistics = TeenyStatistics; -/** - * @description A default threshold representing when to warn about excessive - * in-flight/concurrent requests. - * @type {number} - * @static - * @readonly - * @default 5000 - */ -TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; -//# sourceMappingURL=TeenyStatistics.js.map -/***/ }), + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts -/***/ 446: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -"use strict"; + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } -/** - * @license - * Copyright 2019 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getAgent = exports.pool = void 0; -const http_1 = __nccwpck_require__(3685); -const https_1 = __nccwpck_require__(5687); -// eslint-disable-next-line node/no-deprecated-api -const url_1 = __nccwpck_require__(7310); -exports.pool = new Map(); -/** - * Determines if a proxy should be considered based on the environment. 
- * - * @param uri The request uri - * @returns {boolean} - */ -function shouldUseProxyForURI(uri) { - const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; - if (!noProxyEnv) { - return true; - } - const givenURI = new URL(uri); - for (const noProxyRaw of noProxyEnv.split(',')) { - const noProxy = noProxyRaw.trim(); - if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { - return false; - } - else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { - const noProxyWildcard = noProxy.replace(/^\*\./, '.'); - if (givenURI.hostname.endsWith(noProxyWildcard)) { - return false; - } - } - } - return true; -} -/** - * Returns a custom request Agent if one is found, otherwise returns undefined - * which will result in the global http(s) Agent being used. - * @private - * @param {string} uri The request uri - * @param {Options} reqOpts The request options - * @returns {HttpAnyAgent|undefined} - */ -function getAgent(uri, reqOpts) { - const isHttp = uri.startsWith('http://'); - const proxy = reqOpts.proxy || - process.env.HTTP_PROXY || - process.env.http_proxy || - process.env.HTTPS_PROXY || - process.env.https_proxy; - const poolOptions = Object.assign({}, reqOpts.pool); - const manuallyProvidedProxy = !!reqOpts.proxy; - const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); - if (proxy && shouldUseProxy) { - // tslint:disable-next-line variable-name - const Agent = isHttp - ? __nccwpck_require__(3764) - : __nccwpck_require__(6202); - const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; - return new Agent(proxyOpts); + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err } - let key = isHttp ? 'http' : 'https'; - if (reqOpts.forever) { - key += ':forever'; - if (!exports.pool.has(key)) { - // tslint:disable-next-line variable-name - const Agent = isHttp ? http_1.Agent : https_1.Agent; - exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); - } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) } - return exports.pool.get(key); -} -exports.getAgent = getAgent; -//# sourceMappingURL=agents.js.map -/***/ }), + addSignal(this, signal) + } -/***/ 6886: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } -"use strict"; + this.abort = abort + this.context = context + } -/** - * @license - * Copyright 2018 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.teenyRequest = exports.RequestError = void 0; -const node_fetch_1 = __nccwpck_require__(467); -const stream_1 = __nccwpck_require__(2781); -const uuid = __nccwpck_require__(5840); -const agents_1 = __nccwpck_require__(446); -const TeenyStatistics_1 = __nccwpck_require__(4920); -// eslint-disable-next-line @typescript-eslint/no-var-requires -const streamEvents = __nccwpck_require__(9626); -class RequestError extends Error { -} -exports.RequestError = RequestError; -/** - * Convert options from Request to Fetch format - * @private - * @param reqOpts Request options - */ -function requestToFetchOptions(reqOpts) { - const options = { - method: reqOpts.method || 'GET', - ...(reqOpts.timeout && { timeout: reqOpts.timeout }), - ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), - }; - if (typeof reqOpts.json === 'object') { - // Add Content-type: application/json header - reqOpts.headers = reqOpts.headers || {}; - reqOpts.headers['Content-Type'] = 'application/json'; - // Set body to JSON representation of value - options.body = JSON.stringify(reqOpts.json); - } - else { - if (Buffer.isBuffer(reqOpts.body)) { - options.body = reqOpts.body; - } - else if (typeof reqOpts.body !== 'string') { - options.body = JSON.stringify(reqOpts.body); - } - else { - options.body = reqOpts.body; - } - } - // eslint-disable-next-line @typescript-eslint/no-explicit-any - options.headers = reqOpts.headers; - let uri = (reqOpts.uri || - reqOpts.url); - if (!uri) { - throw new Error('Missing uri or url in reqOpts.'); - } - if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const qs = __nccwpck_require__(3477); - const params = qs.stringify(reqOpts.qs); - uri = uri + '?' + params; - } - options.agent = (0, agents_1.getAgent)(uri, reqOpts); - return { uri, options }; -} -/** - * Convert a response from `fetch` to `request` format. - * @private - * @param opts The `request` options used to create the request. 
- * @param res The Fetch response - * @returns A `request` response object - */ -function fetchToRequestResponse(opts, res) { - const request = {}; - request.agent = opts.agent || false; - request.headers = (opts.headers || {}); - request.href = res.url; - // headers need to be converted from a map to an obj - const resHeaders = {}; - res.headers.forEach((value, key) => (resHeaders[key] = value)); - const response = Object.assign(res.body, { - statusCode: res.status, - statusMessage: res.statusText, - request, - body: res.body, - headers: resHeaders, - toJSON: () => ({ headers: resHeaders }), - }); - return response; -} -/** - * Create POST body from two parts as multipart/related content-type - * @private - * @param boundary - * @param multipart - */ -function createMultipartStream(boundary, multipart) { - const finale = `--${boundary}--`; - const stream = new stream_1.PassThrough(); - for (const part of multipart) { - const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; - stream.write(preamble); - if (typeof part.body === 'string') { - stream.write(part.body); - stream.write('\r\n'); - } - else { - part.body.pipe(stream, { end: false }); - part.body.on('end', () => { - stream.write('\r\n'); - stream.write(finale); - stream.end(); - }); - } - } - return stream; -} -function teenyRequest(reqOpts, callback) { - const { uri, options } = requestToFetchOptions(reqOpts); - const multipart = reqOpts.multipart; - if (reqOpts.multipart && multipart.length === 2) { - if (!callback) { - // TODO: add support for multipart uploads through streaming - throw new Error('Multipart without callback is not implemented.'); - } - const boundary = uuid.v4(); - options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; - options.body = createMultipartStream(boundary, multipart); - // Multipart upload - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, (err) => { - callback(err, response, body); - }); - return; - } - res.text().then(text => { - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this + + const headers = responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return } - if (callback === undefined) { - // Stream mode - const requestStream = streamEvents(new stream_1.PassThrough()); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let responseStream; - requestStream.once('reading', () => { - if (responseStream) { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - } - else { - requestStream.once('response', () => { - (0, stream_1.pipeline)(responseStream, requestStream, () => { }); - }); - } - }); - options.compress = false; - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - responseStream = res.body; - responseStream.on('error', (err) => { - requestStream.emit('error', err); - }); - const response = fetchToRequestResponse(options, res); - requestStream.emit('response', response); - }, err => { - teenyRequest.stats.requestFinished(); - requestStream.emit('error', err); - }); - // fetch doesn't supply the raw HTTP stream, instead it - // returns a PassThrough piped from the HTTP response - // stream. - return requestStream; + + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) + + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } } - // GET or POST with callback - teenyRequest.stats.requestStarting(); - (0, node_fetch_1.default)(uri, options).then(res => { - teenyRequest.stats.requestFinished(); - const header = res.headers.get('content-type'); - const response = fetchToRequestResponse(options, res); - const body = response.body; - if (header === 'application/json' || - header === 'application/json; charset=utf-8') { - if (response.statusCode === 204) { - // Probably a DELETE - callback(null, response, body); - return; - } - res.json().then(json => { - response.body = json; - callback(null, response, json); - }, err => { - callback(err, response, body); - }); - return; - } - res.text().then(text => { - const response = fetchToRequestResponse(options, res); - response.body = text; - callback(null, response, text); - }, err => { - callback(err, response, body); - }); - }, err => { - teenyRequest.stats.requestFinished(); - callback(err, null, null); - }); - return; -} -exports.teenyRequest = teenyRequest; -teenyRequest.defaults = (defaults) => { - return (reqOpts, callback) => { - const opts = { ...defaults, ...reqOpts }; - if (callback === undefined) { - return teenyRequest(opts); - } - teenyRequest(opts, callback); - }; -}; -/** - * Single instance of an interface for keeping track of things. 
- */ -teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); -teenyRequest.resetStats = () => { - teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); -}; -//# sourceMappingURL=index.js.map + } -/***/ }), + onData (chunk) { + const { res } = this + return res.push(chunk) + } -/***/ 6503: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + onComplete (trailers) { + const { res } = this -"use strict"; + removeSignal(this) -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const events_1 = __nccwpck_require__(2361); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const promisify_1 = __importDefault(__nccwpck_require__(5947)); -const debug = debug_1.default('agent-base'); -function isAgent(v) { - return Boolean(v) && typeof v.addRequest === 'function'; -} -function isSecureEndpoint() { - const { stack } = new Error(); - if (typeof stack !== 'string') - return false; - return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); -} -function createAgent(callback, opts) { - return new createAgent.Agent(callback, opts); -} -(function (createAgent) { - /** - * Base `http.Agent` implementation. - * No pooling/keep-alive is implemented by default. - * - * @param {Function} callback - * @api public - */ - class Agent extends events_1.EventEmitter { - constructor(callback, _opts) { - super(); - let opts = _opts; - if (typeof callback === 'function') { - this.callback = callback; - } - else if (callback) { - opts = callback; - } - // Timeout for the socket to be returned from the callback - this.timeout = null; - if (opts && typeof opts.timeout === 'number') { - this.timeout = opts.timeout; - } - // These aren't actually used by `agent-base`, but are required - // for the TypeScript definition files in `@types/node` :/ - this.maxFreeSockets = 1; - this.maxSockets = 1; - this.maxTotalSockets = Infinity; - this.sockets = {}; - this.freeSockets = {}; - this.requests = {}; - this.options = {}; - } - get defaultPort() { - if (typeof this.explicitDefaultPort === 'number') { - return this.explicitDefaultPort; - } - return isSecureEndpoint() ? 443 : 80; - } - set defaultPort(v) { - this.explicitDefaultPort = v; - } - get protocol() { - if (typeof this.explicitProtocol === 'string') { - return this.explicitProtocol; - } - return isSecureEndpoint() ? 'https:' : 'http:'; - } - set protocol(v) { - this.explicitProtocol = v; - } - callback(req, opts, fn) { - throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); - } - /** - * Called by node-core's "_http_client.js" module when creating - * a new HTTP request with this Agent instance. - * - * @api public - */ - addRequest(req, _opts) { - const opts = Object.assign({}, _opts); - if (typeof opts.secureEndpoint !== 'boolean') { - opts.secureEndpoint = isSecureEndpoint(); - } - if (opts.host == null) { - opts.host = 'localhost'; - } - if (opts.port == null) { - opts.port = opts.secureEndpoint ? 443 : 80; - } - if (opts.protocol == null) { - opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; - } - if (opts.host && opts.path) { - // If both a `host` and `path` are specified then it's most - // likely the result of a `url.parse()` call... we need to - // remove the `path` portion so that `net.connect()` doesn't - // attempt to open that as a unix socket file. 
- delete opts.path; - } - delete opts.agent; - delete opts.hostname; - delete opts._defaultAgent; - delete opts.defaultPort; - delete opts.createConnection; - // Hint to use "Connection: close" - // XXX: non-documented `http` module API :( - req._last = true; - req.shouldKeepAlive = false; - let timedOut = false; - let timeoutId = null; - const timeoutMs = opts.timeout || this.timeout; - const onerror = (err) => { - if (req._hadError) - return; - req.emit('error', err); - // For Safety. Some additional errors might fire later on - // and we need to make sure we don't double-fire the error event. - req._hadError = true; - }; - const ontimeout = () => { - timeoutId = null; - timedOut = true; - const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); - err.code = 'ETIMEOUT'; - onerror(err); - }; - const callbackError = (err) => { - if (timedOut) - return; - if (timeoutId !== null) { - clearTimeout(timeoutId); - timeoutId = null; - } - onerror(err); - }; - const onsocket = (socket) => { - if (timedOut) - return; - if (timeoutId != null) { - clearTimeout(timeoutId); - timeoutId = null; - } - if (isAgent(socket)) { - // `socket` is actually an `http.Agent` instance, so - // relinquish responsibility for this `req` to the Agent - // from here on - debug('Callback returned another Agent instance %o', socket.constructor.name); - socket.addRequest(req, opts); - return; - } - if (socket) { - socket.once('free', () => { - this.freeSocket(socket, opts); - }); - req.onSocket(socket); - return; - } - const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); - onerror(err); - }; - if (typeof this.callback !== 'function') { - onerror(new Error('`callback` is not defined')); - return; - } - if (!this.promisifiedCallback) { - if (this.callback.length >= 3) { - debug('Converting legacy callback function to promise'); - this.promisifiedCallback = promisify_1.default(this.callback); - } - else { - this.promisifiedCallback = this.callback; - } - } - if (typeof timeoutMs === 'number' && timeoutMs > 0) { - timeoutId = setTimeout(ontimeout, timeoutMs); - } - if ('port' in opts && typeof opts.port !== 'number') { - opts.port = Number(opts.port); - } - try { - debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); - Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); - } - catch (err) { - Promise.reject(err).catch(callbackError); - } - } - freeSocket(socket, opts) { - debug('Freeing socket %o %o', socket.constructor.name, opts); - socket.destroy(); - } - destroy() { - debug('Destroying agent %o', this.constructor.name); - } + util.parseHeaders(trailers, this.trailers) + + res.push(null) + } + + onError (err) { + const { res, callback, body, opaque } = this + + removeSignal(this) + + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } - createAgent.Agent = Agent; - // So that `instanceof` works correctly - createAgent.prototype = createAgent.Agent.prototype; -})(createAgent || (createAgent = {})); -module.exports = createAgent; -//# sourceMappingURL=index.js.map -/***/ }), + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. 
+ queueMicrotask(() => { + util.destroy(res, err) + }) + } -/***/ 5947: -/***/ ((__unused_webpack_module, exports) => { + if (body) { + this.body = null + util.destroy(body, err) + } + } +} -"use strict"; +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -function promisify(fn) { - return function (req, opts) { - return new Promise((resolve, reject) => { - fn.call(this, req, opts, (err, rtn) => { - if (err) { - reject(err); - } - else { - resolve(rtn); - } - }); - }); - }; + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } } -exports["default"] = promisify; -//# sourceMappingURL=promisify.js.map + +module.exports = request +module.exports.RequestHandler = RequestHandler + /***/ }), -/***/ 8455: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 75395: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const net_1 = __importDefault(__nccwpck_require__(1808)); -const tls_1 = __importDefault(__nccwpck_require__(4404)); -const url_1 = __importDefault(__nccwpck_require__(7310)); -const assert_1 = __importDefault(__nccwpck_require__(9491)); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const agent_base_1 = __nccwpck_require__(6503); -const parse_proxy_response_1 = __importDefault(__nccwpck_require__(4037)); -const debug = debug_1.default('https-proxy-agent:agent'); -/** - * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to - * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. - * - * Outgoing HTTP requests are first tunneled through the proxy server using the - * `CONNECT` HTTP request method to establish a connection to the proxy server, - * and then the proxy server connects to the destination target and issues the - * HTTP request from the proxy server. - * - * `https:` requests have their socket connection upgraded to TLS once - * the connection to the proxy server has been established. 
- * - * @api public - */ -class HttpsProxyAgent extends agent_base_1.Agent { - constructor(_opts) { - let opts; - if (typeof _opts === 'string') { - opts = url_1.default.parse(_opts); - } - else { - opts = _opts; - } - if (!opts) { - throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); - } - debug('creating new HttpsProxyAgent instance: %o', opts); - super(opts); - const proxy = Object.assign({}, opts); - // If `true`, then connect to the proxy server over TLS. - // Defaults to `false`. - this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); - // Prefer `hostname` over `host`, and set the `port` if needed. - proxy.host = proxy.hostname || proxy.host; - if (typeof proxy.port === 'string') { - proxy.port = parseInt(proxy.port, 10); - } - if (!proxy.port && proxy.host) { - proxy.port = this.secureProxy ? 443 : 80; - } - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1']; - } - if (proxy.host && proxy.path) { - // If both a `host` and `path` are specified then it's most likely - // the result of a `url.parse()` call... we need to remove the - // `path` portion so that `net.connect()` doesn't attempt to open - // that as a Unix socket file. - delete proxy.path; - delete proxy.pathname; - } - this.proxy = proxy; + +const { finished, PassThrough } = __nccwpck_require__(12781) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { getResolveErrorBodyCallback } = __nccwpck_require__(77474) +const { AsyncResource } = __nccwpck_require__(50852) +const { addSignal, removeSignal } = __nccwpck_require__(7032) + +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - /** - * Called when the node-core HTTP client library is creating a - * new HTTP request. - * - * @api protected - */ - callback(req, opts) { - return __awaiter(this, void 0, void 0, function* () { - const { proxy, secureProxy } = this; - // Create a socket connection to the proxy server. - let socket; - if (secureProxy) { - debug('Creating `tls.Socket`: %o', proxy); - socket = tls_1.default.connect(proxy); - } - else { - debug('Creating `net.Socket`: %o', proxy); - socket = net_1.default.connect(proxy); - } - const headers = Object.assign({}, proxy.headers); - const hostname = `${opts.host}:${opts.port}`; - let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; - // Inject the `Proxy-Authorization` header if necessary. - if (proxy.auth) { - headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; - } - // The `Host` header should only include the port - // number when it is not the default port. 
- let { host, port, secureEndpoint } = opts; - if (!isDefaultPort(port, secureEndpoint)) { - host += `:${port}`; - } - headers.Host = host; - headers.Connection = 'close'; - for (const name of Object.keys(headers)) { - payload += `${name}: ${headers[name]}\r\n`; - } - const proxyResponsePromise = parse_proxy_response_1.default(socket); - socket.write(`${payload}\r\n`); - const { statusCode, buffered } = yield proxyResponsePromise; - if (statusCode === 200) { - req.once('socket', resume); - if (opts.secureEndpoint) { - // The proxy is connecting to a TLS server, so upgrade - // this socket connection to a TLS connection. - debug('Upgrading socket connection to TLS'); - const servername = opts.servername || opts.host; - return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, - servername })); - } - return socket; - } - // Some other status code that's not 200... need to re-play the HTTP - // header "data" events onto the socket once the HTTP machinery is - // attached so that the node core `http` can parse and handle the - // error status code. - // Close the original socket, and a new "fake" socket is returned - // instead, so that the proxy doesn't get the HTTP request - // written to it (which may contain `Authorization` headers or other - // sensitive data). - // - // See: https://hackerone.com/reports/541502 - socket.destroy(); - const fakeSocket = new net_1.default.Socket({ writable: false }); - fakeSocket.readable = true; - // Need to wait for the "socket" event to re-play the "data" events. - req.once('socket', (s) => { - debug('replaying proxy buffer for failed request'); - assert_1.default(s.listenerCount('data') > 0); - // Replay the "buffered" Buffer onto the fake `socket`, since at - // this point the HTTP module machinery has been hooked up for - // the user. - s.push(buffered); - s.push(null); - }); - return fakeSocket; - }); + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err } -} -exports["default"] = HttpsProxyAgent; -function resume(socket) { - socket.resume(); -} -function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); -} -function isHTTPS(protocol) { - return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; -} -function omit(obj, ...keys) { - const ret = {}; - let key; - for (key in obj) { - if (!keys.includes(key)) { - ret[key] = obj[key]; - } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) } - return ret; -} -//# sourceMappingURL=agent.js.map -/***/ }), + addSignal(this, signal) + } -/***/ 6202: -/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } -"use strict"; + this.abort = abort + this.context = context + } -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -const agent_1 = __importDefault(__nccwpck_require__(8455)); -function createHttpsProxyAgent(opts) { - return new agent_1.default(opts); -} -(function (createHttpsProxyAgent) { - createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; - createHttpsProxyAgent.prototype = agent_1.default.prototype; -})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); -module.exports = createHttpsProxyAgent; -//# sourceMappingURL=index.js.map + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this -/***/ }), + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) -/***/ 4037: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } -"use strict"; + this.factory = null -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const debug_1 = __importDefault(__nccwpck_require__(8237)); -const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); -function parseProxyResponse(socket) { - return new Promise((resolve, reject) => { - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... - let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once('readable', read); - } - function cleanup() { - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - function onclose(err) { - debug('onclose had error %o', err); - } - function onend() { - debug('onend'); - } - function onerror(err) { - cleanup(); - debug('onerror %o', err); - reject(err); + let res + + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() + + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } + + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) + + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } + + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this + + this.res = null + if (err || !res.readable) { + util.destroy(res, err) } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf('\r\n\r\n'); - if (endOfHeaders === -1) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - read(); - return; - } - const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); - const statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - resolve({ - statusCode, - buffered - }); + + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) + + if (err) { + abort() } - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - read(); - }); -} -exports["default"] = parseProxyResponse; -//# sourceMappingURL=parse-proxy-response.js.map + }) + } -/***/ }), + res.on('drain', resume) -/***/ 4256: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.res = res -"use strict"; + const needDrain = res.writableNeedDrain !== undefined + ? res.writableNeedDrain + : res._writableState && res._writableState.needDrain + return needDrain !== true + } -var punycode = __nccwpck_require__(5477); -var mappingTable = __nccwpck_require__(2020); + onData (chunk) { + const { res } = this -var PROCESSING_OPTIONS = { - TRANSITIONAL: 0, - NONTRANSITIONAL: 1 -}; + return res ? res.write(chunk) : true + } -function normalize(str) { // fix bug in v8 - return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); -} + onComplete (trailers) { + const { res } = this -function findStatus(val) { - var start = 0; - var end = mappingTable.length - 1; + removeSignal(this) - while (start <= end) { - var mid = Math.floor((start + end) / 2); + if (!res) { + return + } - var target = mappingTable[mid]; - if (target[0][0] <= val && target[0][1] >= val) { - return target; - } else if (target[0][0] > val) { - end = mid - 1; - } else { - start = mid + 1; + this.trailers = util.parseHeaders(trailers) + + res.end() + } + + onError (err) { + const { res, callback, opaque, body } = this + + removeSignal(this) + + this.factory = null + + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) } - - return null; } -var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; +module.exports = stream -function countSymbols(string) { - return string - // replace every surrogate pair with a BMP symbol - .replace(regexAstralSymbols, '_') - // then get the length - .length; -} -function mapChars(domain_name, useSTD3, processing_option) { - var hasError = false; - var processed = ""; +/***/ }), - var len = countSymbols(domain_name); - for (var i = 0; i < len; ++i) { - var codePoint = domain_name.codePointAt(i); - var status = findStatus(codePoint); +/***/ 36923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - switch (status[1]) { - case "disallowed": - hasError = true; - processed += String.fromCodePoint(codePoint); - break; - case "ignored": - break; - case "mapped": - processed += String.fromCodePoint.apply(String, status[2]); - break; - case "deviation": - if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { - processed += String.fromCodePoint.apply(String, status[2]); - } else { - processed += String.fromCodePoint(codePoint); - } - break; - case "valid": - processed += String.fromCodePoint(codePoint); - break; - case "disallowed_STD3_mapped": - if (useSTD3) { - hasError = true; - processed += String.fromCodePoint(codePoint); - } else { - processed += String.fromCodePoint.apply(String, status[2]); - } - break; - case "disallowed_STD3_valid": - if (useSTD3) { - hasError = true; - } +"use strict"; - processed += String.fromCodePoint(codePoint); - break; + +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(48045) +const { AsyncResource } = __nccwpck_require__(50852) +const util = __nccwpck_require__(83983) +const { addSignal, removeSignal } = __nccwpck_require__(7032) +const assert = __nccwpck_require__(39491) + +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') } - } - return { - string: processed, - error: hasError - }; -} + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } -var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + const { signal, opaque, responseHeaders } = opts -function validateLabel(label, processing_option) { - if (label.substr(0, 4) === "xn--") { - label = punycode.toUnicode(label); - processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; - } + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } - var error = false; + super('UNDICI_UPGRADE') - if (normalize(label) !== label || - (label[3] === "-" && label[4] === "-") || - label[0] === "-" || label[label.length - 1] === "-" || - label.indexOf(".") !== -1 || - label.search(combiningMarksRegex) === 0) { - error = true; + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.abort = null 
+ this.context = null + + addSignal(this, signal) } - var len = countSymbols(label); - for (var i = 0; i < len; ++i) { - var status = findStatus(label.codePointAt(i)); - if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || - (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && - status[1] !== "valid" && status[1] !== "deviation")) { - error = true; - break; + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() } + + this.abort = abort + this.context = null } - return { - label: label, - error: error - }; -} + onHeaders () { + throw new SocketError('bad upgrade', null) + } -function processing(domain_name, useSTD3, processing_option) { - var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this - var labels = result.string.split("."); - for (var i = 0; i < labels.length; ++i) { - try { - var validation = validateLabel(labels[i]); - labels[i] = validation.label; - result.error = result.error || validation.error; - } catch(e) { - result.error = true; - } - } + assert.strictEqual(statusCode, 101) - return { - string: labels.join("."), - error: result.error - }; -} + removeSignal(this) -module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { - var result = processing(domain_name, useSTD3, processing_option); - var labels = result.string.split("."); - labels = labels.map(function(l) { - try { - return punycode.toASCII(l); - } catch(e) { - result.error = true; - return l; - } - }); + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) + } - if (verifyDnsLength) { - var total = labels.slice(0, labels.length - 1).join(".").length; - if (total.length > 253 || total.length === 0) { - result.error = true; - } + onError (err) { + const { callback, opaque } = this - for (var i=0; i < labels.length; ++i) { - if (labels.length > 63 || labels.length === 0) { - result.error = true; - break; - } + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) } } +} - if (result.error) return null; - return labels.join("."); -}; - -module.exports.toUnicode = function(domain_name, useSTD3) { - var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } - return { - domain: result.string, - error: result.error - }; -}; + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} -module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; +module.exports = upgrade /***/ }), -/***/ 4294: +/***/ 44059: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(4219); - - -/***/ }), +"use strict"; -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +module.exports.request = __nccwpck_require__(55448) +module.exports.stream = __nccwpck_require__(75395) +module.exports.pipeline = __nccwpck_require__(28752) +module.exports.upgrade = __nccwpck_require__(36923) +module.exports.connect = __nccwpck_require__(29744) -var net = __nccwpck_require__(1808); -var tls = __nccwpck_require__(4404); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var events = __nccwpck_require__(2361); -var assert = __nccwpck_require__(9491); -var util = __nccwpck_require__(3837); +/***/ }), +/***/ 73858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; +"use strict"; +// Ported from https://github.com/nodejs/undici/pull/907 -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} +const assert = __nccwpck_require__(39491) +const { Readable } = __nccwpck_require__(12781) +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(83983) -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} +let Blob -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') +const noop = () => {} -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. 
+ }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); + this._readableState.dataEmitted = false -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false } - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this + } - function onFree() { - self.emit('free', socket, options); + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() } - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); + if (err) { + this[kAbort]() } - }); -}; -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + return super.destroy(err) + } - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + return super.emit(ev, ...args) } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } - function onResponse(res) { - // Very hacky. 
This is necessary to avoid http-parser leaks. - res.upgrade = true; + addListener (ev, ...args) { + return this.on(ev, ...args) } - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + removeListener (ev, ...args) { + return this.off(ev, ...args) + } - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? super.push(chunk) : true } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); + return super.push(chunk) } - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') } - this.sockets.splice(pos, 1); - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') } -}; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. 
+ throw new NotSupportedError() + } + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] } - return host; // for v0.11 or later -} -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal + + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) } } - } - return target; -} - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); + if (this.closed) { + return Promise.resolve(null) } - console.error.apply(console, args); + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) } -} else { - debug = function() {}; } -exports.debug = debug; // for test +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} -/***/ }), +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} -/***/ 1773: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') + } -"use strict"; + assert(!stream[kConsume]) + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } -const Client = __nccwpck_require__(3598) -const Dispatcher = __nccwpck_require__(412) -const errors = __nccwpck_require__(8045) -const Pool = __nccwpck_require__(4634) -const BalancedPool = __nccwpck_require__(7931) -const Agent = __nccwpck_require__(7890) -const util = __nccwpck_require__(3983) -const { InvalidArgumentError } = errors -const api = __nccwpck_require__(4059) -const buildConnector = __nccwpck_require__(2067) -const MockClient = __nccwpck_require__(8687) -const MockAgent = __nccwpck_require__(6771) -const MockPool = __nccwpck_require__(6193) -const mockErrors = __nccwpck_require__(888) -const ProxyAgent = __nccwpck_require__(7858) -const RetryHandler = __nccwpck_require__(2286) -const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) -const DecoratorHandler = __nccwpck_require__(6930) -const RedirectHandler = __nccwpck_require__(2860) -const createRedirectInterceptor = __nccwpck_require__(8861) + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) -let hasCrypto -try { - __nccwpck_require__(6113) - hasCrypto = true -} catch { - hasCrypto = false + process.nextTick(consumeStart, stream[kConsume]) + }) } -Object.assign(Dispatcher.prototype, api) +function consumeStart (consume) { + if (consume.body === null) { + return + } -module.exports.Dispatcher = Dispatcher -module.exports.Client = Client -module.exports.Pool = Pool -module.exports.BalancedPool = BalancedPool -module.exports.Agent = Agent -module.exports.ProxyAgent = ProxyAgent -module.exports.RetryHandler = RetryHandler + const { _readableState: state } = consume.stream -module.exports.DecoratorHandler = DecoratorHandler -module.exports.RedirectHandler = RedirectHandler -module.exports.createRedirectInterceptor = createRedirectInterceptor + for (const chunk of state.buffer) { + consumePush(consume, chunk) + } -module.exports.buildConnector = buildConnector -module.exports.errors = errors + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) + } -function makeDispatcher (fn) { - return (url, opts, handler) => { - if (typeof opts === 'function') { - handler = opts - opts = null - } + consume.stream.resume() - if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { - throw new InvalidArgumentError('invalid url') - } + while (consume.stream.read() != null) { + // Loop + } +} - if (opts != null && typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume - if (opts && opts.path != null) { - if (typeof opts.path !== 'string') { - throw new InvalidArgumentError('invalid opts.path') - } + try { + if (type === 'text') { + 
resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) - let path = opts.path - if (!opts.path.startsWith('/')) { - path = `/${path}` + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength } - url = new URL(util.parseOrigin(url).origin + path) - } else { - if (!opts) { - opts = typeof url === 'object' ? url : {} + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = (__nccwpck_require__(14300).Blob) } - - url = util.parseURL(url) - } - - const { agent, dispatcher = getGlobalDispatcher() } = opts - - if (agent) { - throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + resolve(new Blob(body, { type: stream[kContentType] })) } - return fn.call(dispatcher, { - ...opts, - origin: url.origin, - path: url.search ? `${url.pathname}${url.search}` : url.pathname, - method: opts.method || (opts.body ? 'PUT' : 'GET') - }, handler) + consumeFinish(consume) + } catch (err) { + stream.destroy(err) } } -module.exports.setGlobalDispatcher = setGlobalDispatcher -module.exports.getGlobalDispatcher = getGlobalDispatcher - -if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { - let fetchImpl = null - module.exports.fetch = async function fetch (resource) { - if (!fetchImpl) { - fetchImpl = (__nccwpck_require__(4881).fetch) - } +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) +} - try { - return await fetchImpl(...arguments) - } catch (err) { - if (typeof err === 'object') { - Error.captureStackTrace(err, this) - } +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } - throw err - } + if (err) { + consume.reject(err) + } else { + consume.resolve() } - module.exports.Headers = __nccwpck_require__(554).Headers - module.exports.Response = __nccwpck_require__(7823).Response - module.exports.Request = __nccwpck_require__(8359).Request - module.exports.FormData = __nccwpck_require__(2015).FormData - module.exports.File = __nccwpck_require__(8511).File - module.exports.FileReader = __nccwpck_require__(1446).FileReader - const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246) + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null +} - module.exports.setGlobalOrigin = setGlobalOrigin - module.exports.getGlobalOrigin = getGlobalOrigin - const { CacheStorage } = __nccwpck_require__(7907) - const { kConstruct } = __nccwpck_require__(9174) +/***/ }), - // Cache & CacheStorage are tightly coupled with fetch. Even if it may run - // in an older version of Node, it doesn't have any use without fetch. 
- module.exports.caches = new CacheStorage(kConstruct) -} +/***/ 77474: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -if (util.nodeMajor >= 16) { - const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724) +const assert = __nccwpck_require__(39491) +const { + ResponseStatusCodeError +} = __nccwpck_require__(48045) +const { toUSVString } = __nccwpck_require__(83983) - module.exports.deleteCookie = deleteCookie - module.exports.getCookies = getCookies - module.exports.getSetCookies = getSetCookies - module.exports.setCookie = setCookie +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) - const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) + let chunks = [] + let limit = 0 - module.exports.parseMIMEType = parseMIMEType - module.exports.serializeAMimeType = serializeAMimeType -} + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break + } + } -if (util.nodeMajor >= 18 && hasCrypto) { - const { WebSocket } = __nccwpck_require__(4284) + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return + } - module.exports.WebSocket = WebSocket -} + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } -module.exports.request = makeDispatcher(api.request) -module.exports.stream = makeDispatcher(api.stream) -module.exports.pipeline = makeDispatcher(api.pipeline) -module.exports.connect = makeDispatcher(api.connect) -module.exports.upgrade = makeDispatcher(api.upgrade) + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + } catch (err) { + // Process in a fallback if error + } -module.exports.MockClient = MockClient -module.exports.MockPool = MockPool -module.exports.MockAgent = MockAgent -module.exports.mockErrors = mockErrors + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) +} + +module.exports = { getResolveErrorBodyCallback } /***/ }), -/***/ 7890: +/***/ 37931: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { InvalidArgumentError } = __nccwpck_require__(8045) -const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785) -const DispatcherBase = __nccwpck_require__(4839) +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = __nccwpck_require__(48045) +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = __nccwpck_require__(73198) const Pool = __nccwpck_require__(4634) -const Client = __nccwpck_require__(3598) -const util = __nccwpck_require__(3983) -const createRedirectInterceptor = __nccwpck_require__(8861) -const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)() - -const kOnConnect = Symbol('onConnect') -const kOnDisconnect = Symbol('onDisconnect') -const kOnConnectionError = Symbol('onConnectionError') -const kMaxRedirections = Symbol('maxRedirections') -const kOnDrain = Symbol('onDrain') +const { kUrl, kInterceptors } = __nccwpck_require__(72785) +const { parseOrigin } = __nccwpck_require__(83983) const kFactory = Symbol('factory') -const kFinalizer = Symbol('finalizer') + const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') + +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} function defaultFactory (origin, opts) { - return opts && opts.connections === 1 - ? new Client(origin, opts) - : new Pool(origin, opts) + return new Pool(origin, opts) } -class Agent extends DispatcherBase { - constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { super() + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 + + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 + + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } + if (typeof factory !== 'function') { throw new InvalidArgumentError('factory must be a function.') } - if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { - throw new InvalidArgumentError('connect must be a function or an object') - } + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? 
opts.interceptors.BalancedPool + : [] + this[kFactory] = factory - if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { - throw new InvalidArgumentError('maxRedirections must be a positive number') + for (const upstream of upstreams) { + this.addUpstream(upstream) } + this._updateBalancedPoolStats() + } - if (connect && typeof connect !== 'function') { - connect = { ...connect } + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) - this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) - ? options.interceptors.Agent - : [createRedirectInterceptor({ maxRedirections })] + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) - this[kOptions] = { ...util.deepClone(options), connect } - this[kOptions].interceptors = options.interceptors - ? { ...options.interceptors } - : undefined - this[kMaxRedirections] = maxRedirections - this[kFactory] = factory - this[kClients] = new Map() - this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { - const ref = this[kClients].get(key) - if (ref !== undefined && ref.deref() === undefined) { - this[kClients].delete(key) - } + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() }) - const agent = this + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) - this[kOnDrain] = (origin, targets) => { - agent.emit('drain', origin, [agent, ...targets]) + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] } - this[kOnConnect] = (origin, targets) => { - agent.emit('connect', origin, [agent, ...targets]) - } + this._updateBalancedPoolStats() - this[kOnDisconnect] = (origin, targets, err) => { - agent.emit('disconnect', origin, [agent, ...targets], err) - } + return this + } - this[kOnConnectionError] = (origin, targets, err) => { - agent.emit('connectionError', origin, [agent, ...targets], err) - } + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) } - get [kRunning] () { - let ret = 0 - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore next: gc is undeterministic */ - if (client) { - ret += client[kRunning] - } + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) + + if (pool) { + this[kRemoveClient](pool) } - return ret + + return this } - [kDispatch] (opts, handler) { - let key - if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { - key = String(opts.origin) - } else { - throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } + + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. + if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() } - const ref = this[kClients].get(key) + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) - let dispatcher = ref ? ref.deref() : null if (!dispatcher) { - dispatcher = this[kFactory](opts.origin, this[kOptions]) - .on('drain', this[kOnDrain]) - .on('connect', this[kOnConnect]) - .on('disconnect', this[kOnDisconnect]) - .on('connectionError', this[kOnConnectionError]) + return + } - this[kClients].set(key, new WeakRef(dispatcher)) - this[kFinalizer].register(dispatcher, key) + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + + if (allClientsBusy) { + return } - return dispatcher.dispatch(opts, handler) - } + let counter = 0 - async [kClose] () { - const closePromises = [] - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore else: gc is undeterministic */ - if (client) { - closePromises.push(client.close()) + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] + + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] } - } - await Promise.all(closePromises) - } + // decrease the current weight every `this[kClients].length`. 
+ if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. + this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] - async [kDestroy] (err) { - const destroyPromises = [] - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore else: gc is undeterministic */ - if (client) { - destroyPromises.push(client.destroy(err)) + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool } } - await Promise.all(destroyPromises) + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] } } -module.exports = Agent +module.exports = BalancedPool /***/ }), -/***/ 7032: +/***/ 66101: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const { addAbortListener } = __nccwpck_require__(3983) -const { RequestAbortedError } = __nccwpck_require__(8045) - -const kListener = Symbol('kListener') -const kSignal = Symbol('kSignal') +"use strict"; -function abort (self) { - if (self.abort) { - self.abort() - } else { - self.onError(new RequestAbortedError()) - } -} -function addSignal (self, signal) { - self[kSignal] = null - self[kListener] = null +const { kConstruct } = __nccwpck_require__(29174) +const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(82396) +const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(83983) +const { kHeadersList } = __nccwpck_require__(72785) +const { webidl } = __nccwpck_require__(21744) +const { Response, cloneResponse } = __nccwpck_require__(27823) +const { Request } = __nccwpck_require__(48359) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(15861) +const { fetching } = __nccwpck_require__(74881) +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(52538) +const assert = __nccwpck_require__(39491) +const { getGlobalDispatcher } = __nccwpck_require__(21892) - if (!signal) { - return - } +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options + */ - if (signal.aborted) { - abort(self) - return - } +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList + */ - self[kSignal] = signal - self[kListener] = () => { - abort(self) - } +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList - addAbortListener(self[kSignal], self[kListener]) -} + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } -function removeSignal (self) { - if (!self[kSignal]) { - return + this.#relevantRequestResponseList = arguments[1] } - if ('removeEventListener' in self[kSignal]) { - self[kSignal].removeEventListener('abort', self[kListener]) - } else { - self[kSignal].removeListener('abort', self[kListener]) - } + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) - self[kSignal] = null - self[kListener] = null -} + request = webidl.converters.RequestInfo(request) + options = 
webidl.converters.CacheQueryOptions(options) -module.exports = { - addSignal, - removeSignal -} + const p = await this.matchAll(request, options) + if (p.length === 0) { + return + } -/***/ }), + return p[0] + } -/***/ 9744: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + async matchAll (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) -"use strict"; + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + // 1. + let r = null -const { AsyncResource } = __nccwpck_require__(852) -const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { addSignal, removeSignal } = __nccwpck_require__(7032) + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] -class ConnectHandler extends AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } } - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + // 5. + // 5.1 + const responses = [] - const { signal, opaque, responseHeaders } = opts + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } } - super('UNDICI_CONNECT') + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! - this.opaque = opaque || null - this.responseHeaders = responseHeaders || null - this.callback = callback - this.abort = null + // 5.5.1 + const responseList = [] - addSignal(this, signal) - } + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() + responseList.push(responseObject) } - this.abort = abort - this.context = context - } - - onHeaders () { - throw new SocketError('bad connect', null) + // 6. + return Object.freeze(responseList) } - onUpgrade (statusCode, rawHeaders, socket) { - const { callback, opaque, context } = this + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) - removeSignal(this) + request = webidl.converters.RequestInfo(request) - this.callback = null + // 1. + const requests = [request] - let headers = rawHeaders - // Indicates is an HTTP2Session - if (headers != null) { - headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - } + // 2. 
+ const responseArrayPromise = this.addAll(requests) - this.runInAsyncScope(callback, null, null, { - statusCode, - headers, - socket, - opaque, - context - }) + // 3. + return await responseArrayPromise } - onError (err) { - const { callback, opaque } = this + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) - removeSignal(this) + requests = webidl.converters['sequence'](requests) - if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } - } -} + // 1. + const responsePromises = [] -function connect (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - connect.call(this, opts, (err, data) => { - return err ? reject(err) : resolve(data) - }) - }) - } + // 2. + const requestList = [] - try { - const connectHandler = new ConnectHandler(opts, callback) - this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) - } catch (err) { - if (typeof callback !== 'function') { - throw err + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } + + // 3.1 + const r = request[kState] + + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) - } -} -module.exports = connect + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] + // 5. + for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] -/***/ }), + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } -/***/ 8752: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' -"use strict"; + // 5.5 + requestList.push(r) + // 5.6 + const responsePromise = createDeferredPromise() -const { - Readable, - Duplex, - PassThrough -} = __nccwpck_require__(2781) -const { - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -const assert = __nccwpck_require__(9491) + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. 
+ // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) -const kResume = Symbol('resume') + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) -class PipelineRequest extends Readable { - constructor () { - super({ autoDestroy: true }) + for (const controller of fetchControllers) { + controller.abort() + } - this[kResume] = null - } + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } - _read () { - const { [kResume]: resume } = this + // 2. + responsePromise.resolve(response) + } + })) - if (resume) { - this[kResume] = null - resume() + // 5.8 + responsePromises.push(responsePromise.promise) } - } - _destroy (err, callback) { - this._read() + // 6. + const p = Promise.all(responsePromises) - callback(err) - } -} + // 7. + const responses = await p -class PipelineResponse extends Readable { - constructor (resume) { - super({ autoDestroy: true }) - this[kResume] = resume - } + // 7.1 + const operations = [] - _read () { - this[kResume]() - } + // 7.2 + let index = 0 - _destroy (err, callback) { - if (!err && !this._readableState.endEmitted) { - err = new RequestAbortedError() - } + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } - callback(err) - } -} + operations.push(operation) // 7.3.5 -class PipelineHandler extends AsyncResource { - constructor (opts, handler) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') + index++ // 7.3.6 } - if (typeof handler !== 'function') { - throw new InvalidArgumentError('invalid handler') - } + // 7.5 + const cacheJobPromise = createDeferredPromise() - const { signal, method, opaque, onInfo, responseHeaders } = opts + // 7.6.1 + let errorData = null - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') - } + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') - } + // 7.7 + return cacheJobPromise.promise + } - super('UNDICI_PIPELINE') + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) - this.opaque = opaque || null - this.responseHeaders = responseHeaders || null - this.handler = handler - this.abort = null - this.context = null - this.onInfo = onInfo || null + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) - this.req = new PipelineRequest().on('error', util.nop) + // 1. + let innerRequest = null - this.ret = new Duplex({ - readableObjectMode: opts.objectMode, - autoDestroy: true, - read: () => { - const { body } = this + // 2. 
+ if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } - if (body && body.resume) { - body.resume() - } - }, - write: (chunk, encoding, callback) => { - const { req } = this + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } - if (req.push(chunk, encoding) || req._readableState.destroyed) { - callback() - } else { - req[kResume] = callback - } - }, - destroy: (err, callback) => { - const { body, req, res, ret, abort } = this + // 5. + const innerResponse = response[kState] - if (!err && !ret._readableState.endEmitted) { - err = new RequestAbortedError() - } + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } - if (abort && err) { - abort() - } + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) - util.destroy(body, err) - util.destroy(req, err) - util.destroy(res, err) + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } - removeSignal(this) + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } - callback(err) - } - }).on('prefinish', () => { - const { req } = this + // 9. + const clonedResponse = cloneResponse(innerResponse) - // Node < 15 does not call _final in same tick. - req.push(null) - }) + // 10. + const bodyReadPromise = createDeferredPromise() - this.res = null + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream - addSignal(this, signal) - } + // 11.2 + const reader = stream.getReader() - onConnect (abort, context) { - const { ret, res } = this + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) + } else { + bodyReadPromise.resolve(undefined) + } - assert(!res, 'pipeline cannot be retried') + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] - if (ret.destroyed) { - throw new RequestAbortedError() + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. } - this.abort = abort - this.context = context - } + // 17. + operations.push(operation) - onHeaders (statusCode, rawHeaders, resume) { - const { opaque, handler, context } = this + // 19. + const bytes = await bodyReadPromise.promise - if (statusCode < 200) { - if (this.onInfo) { - const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - this.onInfo({ statusCode, headers }) - } - return + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes } - this.res = new PipelineResponse(resume) + // 19.1 + const cacheJobPromise = createDeferredPromise() - let body - try { - this.handler = null - const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - body = this.runInAsyncScope(handler, null, { - statusCode, - headers, - opaque, - body: this.res, - context - }) - } catch (err) { - this.res.on('error', util.nop) - throw err - } + // 19.2.1 + let errorData = null - if (!body || typeof body.on !== 'function') { - throw new InvalidReturnValueError('expected Readable') + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e } - body - .on('data', (chunk) => { - const { ret, body } = this + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) - if (!ret.push(chunk) && body.pause) { - body.pause() - } - }) - .on('error', (err) => { - const { ret } = this + return cacheJobPromise.promise + } - util.destroy(ret, err) - }) - .on('end', () => { - const { ret } = this + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) - ret.push(null) - }) - .on('close', () => { - const { ret } = this + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) - if (!ret._readableState.ended) { - util.destroy(ret, new RequestAbortedError()) - } - }) + /** + * @type {Request} + */ + let r = null - this.body = body - } + if (request instanceof Request) { + r = request[kState] - onData (chunk) { - const { res } = this - return res.push(chunk) - } + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') - onComplete (trailers) { - const { res } = this - res.push(null) - } + r = new Request(request)[kState] + } - onError (err) { - const { ret } = this - this.handler = null - util.destroy(ret, err) - } -} + /** @type {CacheBatchOperation[]} */ + const operations = [] -function pipeline (opts, handler) { - try { - const pipelineHandler = new PipelineHandler(opts, handler) - this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) - return pipelineHandler.ret - } catch (err) { - return new PassThrough().destroy(err) - } -} + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } -module.exports = pipeline + operations.push(operation) + const cacheJobPromise = createDeferredPromise() -/***/ }), + let errorData = null + let requestResponses -/***/ 5448: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } -"use strict"; + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) + } else { + cacheJobPromise.reject(errorData) + } + }) + return cacheJobPromise.promise + } -const Readable = __nccwpck_require__(3858) -const { - InvalidArgumentError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) -class RequestHandler extends 
AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) - const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts + // 1. + let r = null - try { - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] - if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { - throw new InvalidArgumentError('invalid highWaterMark') + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] } + } - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } + // 4. + const promise = createDeferredPromise() - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') - } + // 5. + // 5.1 + const requests = [] - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) - super('UNDICI_REQUEST') - } catch (err) { - if (util.isStream(body)) { - util.destroy(body.on('error', util.nop), err) + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) } - throw err } - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.callback = callback - this.res = null - this.abort = null - this.body = body - this.trailers = {} - this.context = null - this.onInfo = onInfo || null - this.throwOnError = throwOnError - this.highWaterMark = highWaterMark + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] - if (util.isStream(body)) { - body.on('error', (err) => { - this.onError(err) - }) - } + // 5.4.2 + for (const request of requests) { + const requestObject = new Request('https://a') + requestObject[kState] = request + requestObject[kHeaders][kHeadersList] = request.headersList + requestObject[kHeaders][kGuard] = 'immutable' + requestObject[kRealm] = request.client - addSignal(this, signal) - } + // 5.4.2.1 + requestList.push(requestObject) + } - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) - this.abort = abort - this.context = context + return promise.promise } - onHeaders (statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList - const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + // 2. 
+ const backupCache = [...cache] - if (statusCode < 200) { - if (this.onInfo) { - this.onInfo({ statusCode, headers }) - } - return - } + // 3. + const addedItems = [] - const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers - const contentType = parsedHeaders['content-type'] - const body = new Readable({ resume, abort, contentType, highWaterMark }) + // 4.1 + const resultList = [] - this.callback = null - this.res = body - if (callback !== null) { - if (this.throwOnError && statusCode >= 400) { - this.runInAsyncScope(getResolveErrorBodyCallback, null, - { callback, body, contentType, statusCode, statusMessage, headers } - ) - } else { - this.runInAsyncScope(callback, null, null, { - statusCode, - headers, - trailers: this.trailers, - opaque, - body, - context - }) - } - } - } + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } - onData (chunk) { - const { res } = this - return res.push(chunk) - } + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } - onComplete (trailers) { - const { res } = this + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } - removeSignal(this) + // 4.2.4 + let requestResponses - util.parseHeaders(trailers, this.trailers) + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) - res.push(null) - } + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } - onError (err) { - const { res, callback, body, opaque } = this + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) - removeSignal(this) + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } - if (callback) { - // TODO: Does this need queueMicrotask? - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } + // 4.2.6.2 + const r = operation.request - if (res) { - this.res = null - // Ensure all queued handlers are invoked before destroying res. - queueMicrotask(() => { - util.destroy(res, err) - }) - } + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } - if (body) { - this.body = null - util.destroy(body, err) - } - } -} + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } -function request (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - request.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } - try { - this.dispatch(opts, new RequestHandler(opts, callback)) - } catch (err) { - if (typeof callback !== 'function') { - throw err + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) + + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.6.7.1 + cache.splice(idx, 1) + } + + // 4.2.6.8 + cache.push([operation.request, operation.response]) + + // 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } + + // 4.2.7 + resultList.push([operation.request, operation.response]) + } + + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 + + // 5.2 + this.#relevantRequestResponseList = backupCache + + // 5.3 + throw e } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) } -} -module.exports = request -module.exports.RequestHandler = RequestHandler + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] + + const storage = targetStorage ?? this.#relevantRequestResponseList + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) + } + } -/***/ }), + return resultList + } -/***/ 5395: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } -"use strict"; + const queryURL = new URL(requestQuery.url) + const cachedURL = new URL(request.url) -const { finished, PassThrough } = __nccwpck_require__(2781) -const { - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) + if (options?.ignoreSearch) { + cachedURL.search = '' -class StreamHandler extends AsyncResource { - constructor (opts, factory, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') + queryURL.search = '' } - const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts - - try { - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } - if (typeof factory !== 'function') { - throw new InvalidArgumentError('invalid factory') - } + if ( + 
response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } + const fieldValues = getFieldValues(response.headersList.get('vary')) - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false } - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') - } + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) - super('UNDICI_STREAM') - } catch (err) { - if (util.isStream(body)) { - util.destroy(body.on('error', util.nop), err) + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false } - throw err } - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.factory = factory - this.callback = callback - this.res = null - this.abort = null - this.context = null - this.trailers = null - this.body = body - this.onInfo = onInfo || null - this.throwOnError = throwOnError || false + return true + } +} - if (util.isStream(body)) { - body.on('error', (err) => { - this.onError(err) - }) - } +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) - addSignal(this, signal) +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false } +] - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) - this.abort = abort - this.context = context +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString } +]) - onHeaders (statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context, callback, responseHeaders } = this +webidl.converters.Response = webidl.interfaceConverter(Response) - const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) - if (statusCode < 200) { - if (this.onInfo) { - this.onInfo({ statusCode, headers }) - } - return - } +module.exports = { + Cache +} - this.factory = null - let res +/***/ }), - if (this.throwOnError && statusCode >= 400) { - const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers - const contentType = parsedHeaders['content-type'] - res = new PassThrough() +/***/ 37907: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.callback = null - this.runInAsyncScope(getResolveErrorBodyCallback, null, - { callback, body: res, contentType, statusCode, statusMessage, headers } - ) - } else { - if (factory === null) { - return - } +"use strict"; - res = this.runInAsyncScope(factory, null, { - statusCode, - headers, - opaque, - context - }) - if ( - !res || - typeof res.write !== 'function' || - typeof res.end !== 'function' || - typeof res.on !== 'function' - ) { - throw new InvalidReturnValueError('expected Writable') - } +const { kConstruct } = __nccwpck_require__(29174) +const { Cache } = __nccwpck_require__(66101) +const { webidl } = __nccwpck_require__(21744) +const { kEnumerableProperty } = __nccwpck_require__(83983) - // TODO: Avoid finished. It registers an unnecessary amount of listeners. - finished(res, { readable: false }, (err) => { - const { callback, res, opaque, trailers, abort } = this +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map} + */ + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) - return res ? res.write(chunk) : true + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) } - onComplete (trailers) { - const { res } = this + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} + */ + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) - removeSignal(this) + cacheName = webidl.converters.DOMString(cacheName) - if (!res) { - return + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') + + // 2.1.1 + const cache = this.#caches.get(cacheName) + + // 2.1.1.1 + return new Cache(kConstruct, cache) } - this.trailers = util.parseHeaders(trailers) + // 2.2 + const cache = [] - res.end() + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) } - onError (err) { - const { res, callback, opaque, body } = this + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} + */ + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) - removeSignal(this) + cacheName = webidl.converters.DOMString(cacheName) - this.factory = null + return this.#caches.delete(cacheName) + } - if (res) { - this.res = null - util.destroy(res, err) - } else if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) - if (body) { - this.body = null - util.destroy(body, err) - } + // 2.1 + const keys = this.#caches.keys() + + // 2.2 + return [...keys] } } -function stream (opts, factory, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - stream.call(this, opts, factory, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) - try { - this.dispatch(opts, new StreamHandler(opts, factory, callback)) - } catch (err) { - if (typeof callback !== 'function') { - throw err - } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) - } +module.exports = { + CacheStorage } -module.exports = stream - /***/ }), -/***/ 6923: +/***/ 29174: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) -const { AsyncResource } = __nccwpck_require__(852) -const util = __nccwpck_require__(3983) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -const assert = __nccwpck_require__(9491) +module.exports = { + kConstruct: (__nccwpck_require__(72785).kConstruct) +} -class UpgradeHandler extends AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } +/***/ }), - const { signal, opaque, responseHeaders } = opts +/***/ 82396: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } +"use strict"; - super('UNDICI_UPGRADE') - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.callback = callback - this.abort = null - this.context = null +const assert = __nccwpck_require__(39491) +const { URLSerializer } = __nccwpck_require__(685) +const { isValidHeaderName } = __nccwpck_require__(52538) - addSignal(this, signal) - } +/** + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} + */ +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() + const serializedB = URLSerializer(B, excludeFragment) + + return serializedA === serializedB +} + +/** + * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header + */ +function fieldValues (header) { + assert(header !== null) + + const values = [] + + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if (!isValidHeaderName(value)) { + continue } - this.abort = abort - this.context = null + values.push(value) } - onHeaders () { - throw new SocketError('bad upgrade', null) - } + return values +} - onUpgrade (statusCode, rawHeaders, socket) { - const { callback, opaque, context } = this +module.exports = { + urlEquals, + fieldValues +} - assert.strictEqual(statusCode, 101) - removeSignal(this) +/***/ }), - this.callback = null - const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - this.runInAsyncScope(callback, null, null, { - headers, - socket, - opaque, - context - }) - } +/***/ 33598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - onError (err) { - const { callback, opaque } = this +"use strict"; +// @ts-check - removeSignal(this) - if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } - } + +/* global WebAssembly */ + +const assert = __nccwpck_require__(39491) +const net = __nccwpck_require__(41808) +const http = __nccwpck_require__(13685) +const { pipeline } = __nccwpck_require__(12781) +const util = __nccwpck_require__(83983) +const timers = __nccwpck_require__(29459) +const Request = __nccwpck_require__(62905) +const DispatcherBase = __nccwpck_require__(74839) +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = __nccwpck_require__(48045) +const buildConnector = __nccwpck_require__(82067) +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = __nccwpck_require__(72785) + +/** @type {import('http2')} */ +let http2 +try { + http2 = __nccwpck_require__(85158) +} catch { + // @ts-ignore + http2 = { constants: {} } } -function upgrade (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - upgrade.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS } +} = http2 - try { - const upgradeHandler = new UpgradeHandler(opts, callback) - this.dispatch({ - ...opts, - method: opts.method || 'GET', - upgrade: opts.protocol || 'Websocket' - }, upgradeHandler) - } catch (err) { - if (typeof callback !== 'function') { - throw err - } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) - } -} +// Experimental +let h2ExperimentalWarned = false -module.exports = upgrade +const FastBuffer = Buffer[Symbol.species] +const kClosedResolve = Symbol('kClosedResolve') -/***/ }), +const channels = {} -/***/ 4059: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +try { + const diagnosticsChannel = __nccwpck_require__(67643) + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} -"use strict"; +/** + * @type {import('../types/client').default} + */ +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * @param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') + } -module.exports.request = __nccwpck_require__(5448) -module.exports.stream = __nccwpck_require__(5395) -module.exports.pipeline = __nccwpck_require__(8752) -module.exports.upgrade = __nccwpck_require__(6923) -module.exports.connect = __nccwpck_require__(9744) + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } -/***/ }), + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } -/***/ 3858: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } -"use strict"; -// Ported from https://github.com/nodejs/undici/pull/907 + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } + if (socketPath != null && typeof socketPath !== 
'string') { + throw new InvalidArgumentError('invalid socketPath') + } + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } -const assert = __nccwpck_require__(9491) -const { Readable } = __nccwpck_require__(2781) -const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3983) + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } -let Blob + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } -const kConsume = Symbol('kConsume') -const kReading = Symbol('kReading') -const kBody = Symbol('kBody') -const kAbort = Symbol('abort') -const kContentType = Symbol('kContentType') + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } -const noop = () => {} + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } -module.exports = class BodyReadable extends Readable { - constructor ({ - resume, - abort, - contentType = '', - highWaterMark = 64 * 1024 // Same as nodejs fs streams. - }) { - super({ - autoDestroy: true, - read: resume, - highWaterMark - }) + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } - this._readableState.dataEmitted = false + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } - this[kAbort] = abort - this[kConsume] = null - this[kBody] = null - this[kContentType] = contentType + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } - // Is stream being consumed through Readable API? - // This is an optimization so that we avoid checking - // for 'data' and 'readable' listeners in the hot path - // inside push(). 
- this[kReading] = false - } + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } - destroy (err) { - if (this.destroyed) { - // Node < 16 - return this + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') } - if (!err && !this._readableState.endEmitted) { - err = new RequestAbortedError() + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') } - if (err) { - this[kAbort]() + if ( + autoSelectFamilyAttemptTimeout != null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') } - return super.destroy(err) - } + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new InvalidArgumentError('allowH2 must be a valid boolean value') + } - emit (ev, ...args) { - if (ev === 'data') { - // Node < 16.7 - this._readableState.dataEmitted = true - } else if (ev === 'error') { - // Node < 16 - this._readableState.errorEmitted = true + if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') } - return super.emit(ev, ...args) - } - on (ev, ...args) { - if (ev === 'data' || ev === 'readable') { - this[kReading] = true + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) } - return super.on(ev, ...args) - } - addListener (ev, ...args) { - return this.on(ev, ...args) - } + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? 
true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' - off (ev, ...args) { - const ret = super.off(ev, ...args) - if (ev === 'data' || ev === 'readable') { - this[kReading] = ( - this.listenerCount('data') > 0 || - this.listenerCount('readable') > 0 - ) - } - return ret - } + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` - removeListener (ev, ...args) { - return this.off(ev, ...args) - } + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). - push (chunk) { - if (this[kConsume] && chunk !== null && this.readableLength === 0) { - consumePush(this[kConsume], chunk) - return this[kReading] ? super.push(chunk) : true - } - return super.push(chunk) + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 } - // https://fetch.spec.whatwg.org/#dom-body-text - async text () { - return consume(this, 'text') + get pipelining () { + return this[kPipelining] } - // https://fetch.spec.whatwg.org/#dom-body-json - async json () { - return consume(this, 'json') + set pipelining (value) { + this[kPipelining] = value + resume(this, true) } - // https://fetch.spec.whatwg.org/#dom-body-blob - async blob () { - return consume(this, 'blob') + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] } - // https://fetch.spec.whatwg.org/#dom-body-arraybuffer - async arrayBuffer () { - return consume(this, 'arrayBuffer') + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] } - // https://fetch.spec.whatwg.org/#dom-body-formdata - async formData () { - // TODO: Implement. - throw new NotSupportedError() + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] } - // https://fetch.spec.whatwg.org/#dom-body-bodyused - get bodyUsed () { - return util.isDisturbed(this) + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed } - // https://fetch.spec.whatwg.org/#dom-body-body - get body () { - if (!this[kBody]) { - this[kBody] = ReadableStreamFrom(this) - if (this[kConsume]) { - // TODO: Is this the best way to force a lock? - this[kBody].getReader() // Ensure stream is locked. - assert(this[kBody].locked) - } - } - return this[kBody] + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) } - dump (opts) { - let limit = opts && Number.isFinite(opts.limit) ? 
opts.limit : 262144 - const signal = opts && opts.signal - - if (signal) { - try { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new InvalidArgumentError('signal must be an AbortSignal') - } - util.throwIfAborted(signal) - } catch (err) { - return Promise.reject(err) - } - } - - if (this.closed) { - return Promise.resolve(null) - } - - return new Promise((resolve, reject) => { - const signalListenerCleanup = signal - ? util.addAbortListener(signal, () => { - this.destroy() - }) - : noop - - this - .on('close', function () { - signalListenerCleanup() - if (signal && signal.aborted) { - reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) - } else { - resolve(null) - } - }) - .on('error', noop) - .on('data', function (chunk) { - limit -= chunk.length - if (limit <= 0) { - this.destroy() - } - }) - .resume() - }) + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) } -} - -// https://streams.spec.whatwg.org/#readablestream-locked -function isLocked (self) { - // Consume is an implicit lock. - return (self[kBody] && self[kBody].locked === true) || self[kConsume] -} - -// https://fetch.spec.whatwg.org/#body-unusable -function isUnusable (self) { - return util.isDisturbed(self) || isLocked(self) -} -async function consume (stream, type) { - if (isUnusable(stream)) { - throw new TypeError('unusable') - } + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin - assert(!stream[kConsume]) + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) - return new Promise((resolve, reject) => { - stream[kConsume] = { - type, - stream, - resolve, - reject, - length: 0, - body: [] + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) } - stream - .on('error', function (err) { - consumeFinish(this[kConsume], err) - }) - .on('close', function () { - if (this[kConsume].body !== null) { - consumeFinish(this[kConsume], new RequestAbortedError()) - } - }) - - process.nextTick(consumeStart, stream[kConsume]) - }) -} - -function consumeStart (consume) { - if (consume.body === null) { - return - } - - const { _readableState: state } = consume.stream + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } - for (const chunk of state.buffer) { - consumePush(consume, chunk) + return this[kNeedDrain] < 2 } - if (state.endEmitted) { - consumeEnd(this[kConsume]) - } else { - consume.stream.on('end', function () { - consumeEnd(this[kConsume]) + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. 
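+ // Close settles immediately when nothing is queued; otherwise the resolver is parked in kClosedResolve and invoked once the last pending request completes.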
+ return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } }) } - consume.stream.resume() - - while (consume.stream.read() != null) { - // Loop - } -} - -function consumeEnd (consume) { - const { type, body, resolve, stream, length } = consume - - try { - if (type === 'text') { - resolve(toUSVString(Buffer.concat(body))) - } else if (type === 'json') { - resolve(JSON.parse(Buffer.concat(body))) - } else if (type === 'arrayBuffer') { - const dst = new Uint8Array(length) + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } - let pos = 0 - for (const buf of body) { - dst.set(buf, pos) - pos += buf.byteLength + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? + this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() } - resolve(dst.buffer) - } else if (type === 'blob') { - if (!Blob) { - Blob = (__nccwpck_require__(4300).Blob) + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null } - resolve(new Blob(body, { type: stream[kContentType] })) - } - consumeFinish(consume) - } catch (err) { - stream.destroy(err) - } -} + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } -function consumePush (consume, chunk) { - consume.length += chunk.length - consume.body.push(chunk) + resume(this) + }) + } } -function consumeFinish (consume, err) { - if (consume.body === null) { - return - } +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - if (err) { - consume.reject(err) - } else { - consume.resolve() - } + this[kSocket][kError] = err - consume.type = null - consume.stream = null - consume.resolve = null - consume.reject = null - consume.length = 0 - consume.body = null + onError(this[kClient], err) } +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) -/***/ }), - -/***/ 7474: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) + } +} -const assert = __nccwpck_require__(9491) -const { - ResponseStatusCodeError -} = __nccwpck_require__(8045) -const { toUSVString } = __nccwpck_require__(3983) +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) +} -async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { - assert(body) +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null - let chunks = [] - let limit = 0 + if (client.destroyed) { + assert(this[kPending] === 0) - for await (const chunk of body) { - chunks.push(chunk) - limit += chunk.length - if (limit > 128 * 1024) { - chunks = null - break + // Fail entire queue. 
+ const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) } - } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null - if (statusCode === 204 || !contentType || !chunks) { - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) - return + errorRequest(client, request, err) } - try { - if (contentType.startsWith('application/json')) { - const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) - return - } - - if (contentType.startsWith('text/')) { - const payload = toUSVString(Buffer.concat(chunks)) - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) - return - } - } catch (err) { - // Process in a fallback if error - } + client[kPendingIdx] = client[kRunningIdx] - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) -} + assert(client[kRunning] === 0) -module.exports = { getResolveErrorBodyCallback } + client.emit('disconnect', + client[kUrl], + [client], + err + ) + resume(client) +} -/***/ }), +const constants = __nccwpck_require__(30953) +const createRedirectInterceptor = __nccwpck_require__(38861) +const EMPTY_BUF = Buffer.alloc(0) -/***/ 7931: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(61145) : undefined -"use strict"; + let mod + try { + mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(95627), 'base64')) + } catch (e) { + /* istanbul ignore next */ + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. 
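+ // Fall back to the plain (non-SIMD) llhttp WebAssembly build when compiling the SIMD variant fails.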
+ mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(61145), 'base64')) + } -const { - BalancedPoolMissingUpstreamError, - InvalidArgumentError -} = __nccwpck_require__(8045) -const { - PoolBase, - kClients, - kNeedDrain, - kAddClient, - kRemoveClient, - kGetDispatcher -} = __nccwpck_require__(3198) -const Pool = __nccwpck_require__(4634) -const { kUrl, kInterceptors } = __nccwpck_require__(2785) -const { parseOrigin } = __nccwpck_require__(3983) -const kFactory = Symbol('factory') + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ -const kOptions = Symbol('options') -const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') -const kCurrentWeight = Symbol('kCurrentWeight') -const kIndex = Symbol('kIndex') -const kWeight = Symbol('kWeight') -const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') -const kErrorPenalty = Symbol('kErrorPenalty') + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } -function getGreatestCommonDivisor (a, b) { - if (b === 0) return a - return getGreatestCommonDivisor(b, a % b) + /* eslint-enable camelcase */ + } + }) } -function defaultFactory (origin, opts) { - return new Pool(origin, opts) -} +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() -class BalancedPool extends PoolBase { - constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { - super() +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null - this[kOptions] = opts - this[kIndex] = -1 - this[kCurrentWeight] = 0 +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 - this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 - this[kErrorPenalty] = this[kOptions].errorPenalty || 15 +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) - if (!Array.isArray(upstreams)) { - upstreams = [upstreams] - } + this.llhttp = exports + this.ptr = 
this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client + this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) - if (typeof factory !== 'function') { - throw new InvalidArgumentError('factory must be a function.') - } + this.bytesRead = 0 - this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) - ? opts.interceptors.BalancedPool - : [] - this[kFactory] = factory + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } - for (const upstream of upstreams) { - this.addUpstream(upstream) + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } } - this._updateBalancedPoolStats() } - addUpstream (upstream) { - const upstreamOrigin = parseOrigin(upstream).origin - - if (this[kClients].find((pool) => ( - pool[kUrl].origin === upstreamOrigin && - pool.closed !== true && - pool.destroyed !== true - ))) { - return this + resume () { + if (this.socket.destroyed || !this.paused) { + return } - const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) - this[kAddClient](pool) - pool.on('connect', () => { - pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) - }) + assert(this.ptr != null) + assert(currentParser == null) - pool.on('connectionError', () => { - pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) - this._updateBalancedPoolStats() - }) + this.llhttp.llhttp_resume(this.ptr) - pool.on('disconnect', (...args) => { - const err = args[2] - if (err && err.code === 'UND_ERR_SOCKET') { - // decrease the weight of the pool. - pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) - this._updateBalancedPoolStats() + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } - }) - - for (const client of this[kClients]) { - client[kWeight] = this[kMaxWeightPerServer] } - this._updateBalancedPoolStats() - - return this - } - - _updateBalancedPoolStats () { - this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. 
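+ // Keep reading any data that accumulated on the socket while the parser was paused.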
+ this.readMore() } - removeUpstream (upstream) { - const upstreamOrigin = parseOrigin(upstream).origin - - const pool = this[kClients].find((pool) => ( - pool[kUrl].origin === upstreamOrigin && - pool.closed !== true && - pool.destroyed !== true - )) - - if (pool) { - this[kRemoveClient](pool) + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) } - - return this - } - - get upstreams () { - return this[kClients] - .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) - .map((p) => p[kUrl].origin) } - [kGetDispatcher] () { - // We validate that pools is greater than 0, - // otherwise we would have to wait until an upstream - // is added, which might never happen. - if (this[kClients].length === 0) { - throw new BalancedPoolMissingUpstreamError() - } - - const dispatcher = this[kClients].find(dispatcher => ( - !dispatcher[kNeedDrain] && - dispatcher.closed !== true && - dispatcher.destroyed !== true - )) - - if (!dispatcher) { - return - } + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) - const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + const { socket, llhttp } = this - if (allClientsBusy) { - return + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) + } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) } - let counter = 0 - - let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) - while (counter++ < this[kClients].length) { - this[kIndex] = (this[kIndex] + 1) % this[kClients].length - const pool = this[kClients][this[kIndex]] + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. + try { + let ret - // find pool index with the largest weight - if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { - maxWeightIndex = this[kIndex] + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null } - // decrease the current weight every `this[kClients].length`. - if (this[kIndex] === 0) { - // Set the current weight to the next lower weight. 
- this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr - if (this[kCurrentWeight] <= 0) { - this[kCurrentWeight] = this[kMaxWeightPerServer] + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) } - if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { - return pool - } + } catch (err) { + util.destroy(socket, err) } - - this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] - this[kIndex] = maxWeightIndex - return this[kClients][maxWeightIndex] } -} - -module.exports = BalancedPool + destroy () { + assert(this.ptr != null) + assert(currentParser == null) -/***/ }), + this.llhttp.llhttp_free(this.ptr) + this.ptr = null -/***/ 6101: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null -"use strict"; + this.paused = false + } + onStatus (buf) { + this.statusText = buf.toString() + } -const { kConstruct } = __nccwpck_require__(9174) -const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(2396) -const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3983) -const { kHeadersList } = __nccwpck_require__(2785) -const { webidl } = __nccwpck_require__(1744) -const { Response, cloneResponse } = __nccwpck_require__(7823) -const { Request } = __nccwpck_require__(8359) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const { fetching } = __nccwpck_require__(4881) -const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2538) -const assert = __nccwpck_require__(9491) -const { getGlobalDispatcher } = __nccwpck_require__(1892) + onMessageBegin () { + const { socket, client } = this -/** - * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation - * @typedef {Object} CacheBatchOperation - * @property {'delete' | 'put'} type - * @property {any} request - * @property {any} response - * @property {import('../../types/cache').CacheQueryOptions} options - */ + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } -/** - * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list - * @typedef {[any, any][]} requestResponseList - */ + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } + } -class Cache { - /** - * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list - * @type {requestResponseList} - */ - #relevantRequestResponseList + onHeaderField (buf) { + const len = this.headers.length - constructor () { - if (arguments[0] !== kConstruct) { - webidl.illegalConstructor() + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) } - this.#relevantRequestResponseList = arguments[1] + 
this.trackHeader(buf.length) } - async match (request, options = {}) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) - - request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + onHeaderValue (buf) { + let len = this.headers.length - const p = await this.matchAll(request, options) + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } - if (p.length === 0) { - return + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() } - return p[0] + this.trackHeader(buf.length) } - async matchAll (request = undefined, options = {}) { - webidl.brandCheck(this, Cache) + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) + } + } - if (request !== undefined) request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this - // 1. - let r = null + assert(upgrade) - // 2. - if (request !== undefined) { - if (request instanceof Request) { - // 2.1.1 - r = request[kState] + const request = client[kQueue][client[kRunningIdx]] + assert(request) - // 2.1.2 - if (r.method !== 'GET' && !options.ignoreMethod) { - return [] - } - } else if (typeof request === 'string') { - // 2.2.1 - r = new Request(request)[kState] - } - } + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') - // 5. - // 5.1 - const responses = [] + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null - // 5.2 - if (request === undefined) { - // 5.2.1 - for (const requestResponse of this.#relevantRequestResponseList) { - responses.push(requestResponse[1]) - } - } else { // 5.3 - // 5.3.1 - const requestResponses = this.#queryCache(r, options) + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 - // 5.3.2 - for (const requestResponse of requestResponses) { - responses.push(requestResponse[1]) - } - } + socket.unshift(head) - // 5.4 - // We don't implement CORs so we don't need to loop over the responses, yay! + socket[kParser].destroy() + socket[kParser] = null - // 5.5.1 - const responseList = [] + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + .removeListener('close', onSocketClose) - // 5.5.2 - for (const response of responses) { - // 5.5.2.1 - const responseObject = new Response(response.body?.source ?? 
null) - const body = responseObject[kState].body - responseObject[kState] = response - responseObject[kState].body = body - responseObject[kHeaders][kHeadersList] = response.headersList - responseObject[kHeaders][kGuard] = 'immutable' + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) - responseList.push(responseObject) + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) } - // 6. - return Object.freeze(responseList) + resume(client) } - async add (request) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this - request = webidl.converters.RequestInfo(request) + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } - // 1. - const requests = [request] + const request = client[kQueue][client[kRunningIdx]] - // 2. - const responseArrayPromise = this.addAll(requests) + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 + } - // 3. - return await responseArrayPromise - } + assert(!this.upgrade) + assert(this.statusCode < 200) - async addAll (requests) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } - requests = webidl.converters['sequence'](requests) + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 + } - // 1. - const responsePromises = [] + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) - // 2. - const requestList = [] + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) - // 3. - for (const request of requests) { - if (typeof request === 'string') { - continue + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } + } - // 3.1 - const r = request[kState] + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } - // 3.2 - if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.addAll', - message: 'Expected http/s scheme when method is not GET.' - }) - } + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 } - // 4. - /** @type {ReturnType[]} */ - const fetchControllers = [] + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 - // 5. - for (const request of requests) { - // 5.1 - const r = new Request(request)[kState] + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? 
util.parseKeepAliveTimeout(this.keepAlive) : null - // 5.2 - if (!urlIsHttpHttpsScheme(r.url)) { - throw webidl.errors.exception({ - header: 'Cache.addAll', - message: 'Expected http/s scheme.' - }) + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true + } else { + client[kKeepAliveTimeoutValue] = timeout + } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } - // 5.4 - r.initiator = 'fetch' - r.destination = 'subresource' - - // 5.5 - requestList.push(r) - - // 5.6 - const responsePromise = createDeferredPromise() - - // 5.7 - fetchControllers.push(fetching({ - request: r, - dispatcher: getGlobalDispatcher(), - processResponse (response) { - // 1. - if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { - responsePromise.reject(webidl.errors.exception({ - header: 'Cache.addAll', - message: 'Received an invalid status code or the request failed.' - })) - } else if (response.headersList.contains('vary')) { // 2. - // 2.1 - const fieldValues = getFieldValues(response.headersList.get('vary')) - - // 2.2 - for (const fieldValue of fieldValues) { - // 2.2.1 - if (fieldValue === '*') { - responsePromise.reject(webidl.errors.exception({ - header: 'Cache.addAll', - message: 'invalid vary field value' - })) + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false - for (const controller of fetchControllers) { - controller.abort() - } + if (request.aborted) { + return -1 + } - return - } - } - } - }, - processResponseEndOfBody (response) { - // 1. - if (response.aborted) { - responsePromise.reject(new DOMException('aborted', 'AbortError')) - return - } + if (request.method === 'HEAD') { + return 1 + } - // 2. - responsePromise.resolve(response) - } - })) + if (statusCode < 200) { + return 1 + } - // 5.8 - responsePromises.push(responsePromise.promise) + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) } - // 6. - const p = Promise.all(responsePromises) + return pause ? constants.ERROR.PAUSED : 0 + } - // 7. 
- const responses = await p + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this - // 7.1 - const operations = [] + if (socket.destroyed) { + return -1 + } - // 7.2 - let index = 0 + const request = client[kQueue][client[kRunningIdx]] + assert(request) - // 7.3 - for (const response of responses) { - // 7.3.1 - /** @type {CacheBatchOperation} */ - const operation = { - type: 'put', // 7.3.2 - request: requestList[index], // 7.3.3 - response // 7.3.4 + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() } + } - operations.push(operation) // 7.3.5 + assert(statusCode >= 200) - index++ // 7.3.6 + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 } - // 7.5 - const cacheJobPromise = createDeferredPromise() + this.bytesRead += buf.length - // 7.6.1 - let errorData = null + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED + } + } - // 7.6.2 - try { - this.#batchCacheOperations(operations) - } catch (e) { - errorData = e + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 } - // 7.6.3 - queueMicrotask(() => { - // 7.6.3.1 - if (errorData === null) { - cacheJobPromise.resolve(undefined) - } else { - // 7.6.3.2 - cacheJobPromise.reject(errorData) - } - }) + if (upgrade) { + return + } - // 7.7 - return cacheJobPromise.promise - } + const request = client[kQueue][client[kRunningIdx]] + assert(request) - async put (request, response) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) + assert(statusCode >= 100) - request = webidl.converters.RequestInfo(request) - response = webidl.converters.Response(response) + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' - // 1. - let innerRequest = null + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 - // 2. - if (request instanceof Request) { - innerRequest = request[kState] - } else { // 3. - innerRequest = new Request(request)[kState] + if (statusCode < 200) { + return } - // 4. - if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Expected an http/s scheme when method is not GET' - }) + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 } - // 5. - const innerResponse = response[kState] + request.onComplete(headers) - // 6. - if (innerResponse.status === 206) { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Got 206 status' - }) + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. 
+ util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. + setImmediate(resume, client) + } else { + resume(client) } + } +} - // 7. - if (innerResponse.headersList.contains('vary')) { - // 7.1. - const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser - // 7.2. - for (const fieldValue of fieldValues) { - // 7.2.1 - if (fieldValue === '*') { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Got * vary field value' - }) - } - } + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) } - - // 8. - if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Response body is locked or disturbed' - }) + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} - // 9. - const clonedResponse = cloneResponse(innerResponse) +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} - // 10. - const bodyReadPromise = createDeferredPromise() +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this - // 11. - if (innerResponse.body != null) { - // 11.1 - const stream = innerResponse.body.stream + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so for as a valid response. + parser.onMessageComplete() + return + } + } + + this[kError] = err + + onError(this[kClient], err) +} - // 11.2 - const reader = stream.getReader() +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by running request and not a recoverable + // socket error. 
- // 11.3 - readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) - } else { - bodyReadPromise.resolve(undefined) + assert(client[kPendingIdx] === client[kRunningIdx]) + + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) } + assert(client[kSize] === 0) + } +} - // 12. - /** @type {CacheBatchOperation[]} */ - const operations = [] +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this - // 13. - /** @type {CacheBatchOperation} */ - const operation = { - type: 'put', // 14. - request: innerRequest, // 15. - response: clonedResponse // 16. + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return } + } - // 17. - operations.push(operation) + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} - // 19. - const bytes = await bodyReadPromise.promise +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this - if (clonedResponse.body != null) { - clonedResponse.body.source = bytes + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() } - // 19.1 - const cacheJobPromise = createDeferredPromise() + this[kParser].destroy() + this[kParser] = null + } - // 19.2.1 - let errorData = null + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) - // 19.2.2 - try { - this.#batchCacheOperations(operations) - } catch (e) { - errorData = e - } + client[kSocket] = null - // 19.2.3 - queueMicrotask(() => { - // 19.2.3.1 - if (errorData === null) { - cacheJobPromise.resolve() - } else { // 19.2.3.2 - cacheJobPromise.reject(errorData) - } - }) + if (client.destroyed) { + assert(client[kPending] === 0) - return cacheJobPromise.promise - } + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline. 
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null - async delete (request, options = {}) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) + errorRequest(client, request, err) + } - request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + client[kPendingIdx] = client[kRunningIdx] - /** - * @type {Request} - */ - let r = null + assert(client[kRunning] === 0) - if (request instanceof Request) { - r = request[kState] + client.emit('disconnect', client[kUrl], [client], err) - if (r.method !== 'GET' && !options.ignoreMethod) { - return false - } - } else { - assert(typeof request === 'string') + resume(client) +} - r = new Request(request)[kState] - } +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) - /** @type {CacheBatchOperation[]} */ - const operations = [] + let { host, hostname, protocol, port } = client[kUrl] - /** @type {CacheBatchOperation} */ - const operation = { - type: 'delete', - request: r, - options - } + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') - operations.push(operation) + assert(idx !== -1) + const ip = hostname.substring(1, idx) - const cacheJobPromise = createDeferredPromise() + assert(net.isIP(ip)) + hostname = ip + } - let errorData = null - let requestResponses + client[kConnecting] = true - try { - requestResponses = this.#batchCacheOperations(operations) - } catch (e) { - errorData = e - } + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) + } - queueMicrotask(() => { - if (errorData === null) { - cacheJobPromise.resolve(!!requestResponses?.length) - } else { - cacheJobPromise.reject(errorData) - } + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) }) - return cacheJobPromise.promise - } + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } - /** - * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys - * @param {any} request - * @param {import('../../types/cache').CacheQueryOptions} options - * @returns {readonly Request[]} - */ - async keys (request = undefined, options = {}) { - webidl.brandCheck(this, Cache) + client[kConnecting] = false - if (request !== undefined) request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + assert(socket) - // 1. - let r = null + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } - // 2. 
- if (request !== undefined) { - // 2.1 - if (request instanceof Request) { - // 2.1.1 - r = request[kState] + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) - // 2.1.2 - if (r.method !== 'GET' && !options.ignoreMethod) { - return [] - } - } else if (typeof request === 'string') { // 2.2 - r = new Request(request)[kState] + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null } + + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) } - // 4. - const promise = createDeferredPromise() + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null - // 5. - // 5.1 - const requests = [] + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) - // 5.2 - if (request === undefined) { - // 5.2.1 - for (const requestResponse of this.#relevantRequestResponseList) { - // 5.2.1.1 - requests.push(requestResponse[0]) - } - } else { // 5.3 - // 5.3.1 - const requestResponses = this.#queryCache(r, options) + client[kSocket] = socket - // 5.3.2 - for (const requestResponse of requestResponses) { - // 5.3.2.1 - requests.push(requestResponse[0]) - } + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return } - // 5.4 - queueMicrotask(() => { - // 5.4.1 - const requestList = [] + client[kConnecting] = false - // 5.4.2 - for (const request of requests) { - const requestObject = new Request('https://a') - requestObject[kState] = request - requestObject[kHeaders][kHeadersList] = request.headersList - requestObject[kHeaders][kGuard] = 'immutable' - requestObject[kRealm] = request.client + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } - // 5.4.2.1 - requestList.push(requestObject) + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) } + } else { + onError(client, err) + } - // 5.4.3 - promise.resolve(Object.freeze(requestList)) - }) - - return promise.promise + client.emit('connectionError', client[kUrl], [client], err) } - /** - * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm - * @param {CacheBatchOperation[]} operations - * @returns {requestResponseList} - */ - 
#batchCacheOperations (operations) { - // 1. - const cache = this.#relevantRequestResponseList + resume(client) +} - // 2. - const backupCache = [...cache] +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} - // 3. - const addedItems = [] +function resume (client, sync) { + if (client[kResuming] === 2) { + return + } - // 4.1 - const resultList = [] + client[kResuming] = 2 - try { - // 4.2 - for (const operation of operations) { - // 4.2.1 - if (operation.type !== 'delete' && operation.type !== 'put') { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'operation type does not match "delete" or "put"' - }) - } + _resume(client, sync) + client[kResuming] = 0 - // 4.2.2 - if (operation.type === 'delete' && operation.response != null) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'delete operation should not have an associated response' - }) - } + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 + } +} - // 4.2.3 - if (this.#queryCache(operation.request, operation.options, addedItems).length) { - throw new DOMException('???', 'InvalidStateError') - } +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } - // 4.2.4 - let requestResponses + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } - // 4.2.5 - if (operation.type === 'delete') { - // 4.2.5.1 - requestResponses = this.#queryCache(operation.request, operation.options) + const socket = client[kSocket] - // TODO: the spec is wrong, this is needed to pass WPTs - if (requestResponses.length === 0) { - return [] - } + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false + } - // 4.2.5.2 - for (const requestResponse of requestResponses) { - const idx = cache.indexOf(requestResponse) - assert(idx !== -1) + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? 
request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } + } + } - // 4.2.5.2.1 - cache.splice(idx, 1) - } - } else if (operation.type === 'put') { // 4.2.6 - // 4.2.6.1 - if (operation.response == null) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'put operation should have an associated response' - }) - } + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } - // 4.2.6.2 - const r = operation.request + if (client[kPending] === 0) { + return + } - // 4.2.6.3 - if (!urlIsHttpHttpsScheme(r.url)) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'expected http or https scheme' - }) - } + if (client[kRunning] >= (client[kPipelining] || 1)) { + return + } - // 4.2.6.4 - if (r.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'not get method' - }) - } + const request = client[kQueue][client[kPendingIdx]] - // 4.2.6.5 - if (operation.options != null) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'options must not be defined' - }) - } + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } - // 4.2.6.6 - requestResponses = this.#queryCache(operation.request) + client[kServerName] = request.servername - // 4.2.6.7 - for (const requestResponse of requestResponses) { - const idx = cache.indexOf(requestResponse) - assert(idx !== -1) + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } + } + + if (client[kConnecting]) { + return + } - // 4.2.6.7.1 - cache.splice(idx, 1) - } + if (!socket && !client[kHTTP2Session]) { + connect(client) + return + } - // 4.2.6.8 - cache.push([operation.request, operation.response]) + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } - // 4.2.6.10 - addedItems.push([operation.request, operation.response]) - } + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } - // 4.2.7 - resultList.push([operation.request, operation.response]) - } + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } - // 4.3 - return resultList - } catch (e) { // 5. - // 5.1 - this.#relevantRequestResponseList.length = 0 + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. + // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. - // 5.2 - this.#relevantRequestResponseList = backupCache + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. 
+ return + } - // 5.3 - throw e + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) } } +} - /** - * @see https://w3c.github.io/ServiceWorker/#query-cache - * @param {any} requestQuery - * @param {import('../../types/cache').CacheQueryOptions} options - * @param {requestResponseList} targetStorage - * @returns {requestResponseList} - */ - #queryCache (requestQuery, options, targetStorage) { - /** @type {requestResponseList} */ - const resultList = [] +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} - const storage = targetStorage ?? this.#relevantRequestResponseList +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return + } - for (const requestResponse of storage) { - const [cachedRequest, cachedResponse] = requestResponse - if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { - resultList.push(requestResponse) - } - } + const { body, method, path, host, upgrade, headers, blocking, reset } = request - return resultList + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) } - /** - * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm - * @param {any} requestQuery - * @param {any} request - * @param {any | null} response - * @param {import('../../types/cache').CacheQueryOptions | undefined} options - * @returns {boolean} - */ - #requestMatchesCachedItem (requestQuery, request, response = null, options) { - // if (options?.ignoreMethod === false && request.method === 'GET') { - // return false - // } + const bodyLength = util.bodyLength(body) - const queryURL = new URL(requestQuery.url) + let contentLength = bodyLength - const cachedURL = new URL(request.url) + if (contentLength === null) { + contentLength = request.contentLength + } - if (options?.ignoreSearch) { - cachedURL.search = '' + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. - queryURL.search = '' - } + contentLength = null + } - if (!urlEquals(queryURL, cachedURL, true)) { + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. 
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) return false } - if ( - response == null || - options?.ignoreVary || - !response.headersList.contains('vary') - ) { - return true - } + process.emitWarning(new RequestContentLengthMismatchError()) + } - const fieldValues = getFieldValues(response.headersList.get('vary')) + const socket = client[kSocket] - for (const fieldValue of fieldValues) { - if (fieldValue === '*') { - return false + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return } - const requestValue = request.headersList.get(fieldValue) - const queryValue = requestQuery.headersList.get(fieldValue) + errorRequest(client, request, err || new RequestAbortedError()) - // If one has the header and the other doesn't, or one has - // a different value than the other, return false - if (requestValue !== queryValue) { - return false - } - } + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } - return true + if (request.aborted) { + return false } -} -Object.defineProperties(Cache.prototype, { - [Symbol.toStringTag]: { - value: 'Cache', - configurable: true - }, - match: kEnumerableProperty, - matchAll: kEnumerableProperty, - add: kEnumerableProperty, - addAll: kEnumerableProperty, - put: kEnumerableProperty, - delete: kEnumerableProperty, - keys: kEnumerableProperty -}) + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. -const cacheQueryOptionConverters = [ - { - key: 'ignoreSearch', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'ignoreMethod', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'ignoreVary', - converter: webidl.converters.boolean, - defaultValue: false + socket[kReset] = true } -] -webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
-webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ - ...cacheQueryOptionConverters, - { - key: 'cacheName', - converter: webidl.converters.DOMString + socket[kReset] = true } -]) -webidl.converters.Response = webidl.interfaceConverter(Response) - -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.RequestInfo -) + if (reset != null) { + socket[kReset] = reset + } -module.exports = { - Cache -} + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } + if (blocking) { + socket[kBlocking] = true + } -/***/ }), + let header = `${method} ${path} HTTP/1.1\r\n` -/***/ 7907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] + } -"use strict"; + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } + if (headers) { + header += headers + } -const { kConstruct } = __nccwpck_require__(9174) -const { Cache } = __nccwpck_require__(6101) -const { webidl } = __nccwpck_require__(1744) -const { kEnumerableProperty } = __nccwpck_require__(3983) + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } -class CacheStorage { - /** - * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map - * @type {Map { + if (request.aborted || request.completed) { + return } - } else { // 2. - // 2.2 - for (const cacheList of this.#caches.values()) { - const cache = new Cache(kConstruct, cacheList) - // 2.2.1.2 - const response = await cache.match(request, options) + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) + } - if (response !== undefined) { - return response - } - } - } + if (request.aborted) { + return false } - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-has - * @param {string} cacheName - * @returns {Promise} - */ - async has (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] - cacheName = webidl.converters.DOMString(cacheName) + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method - // 2.1.1 - // 2.2 - return this.#caches.has(cacheName) + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until + // `ready` event is triggered + // We disabled endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + } else { + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + }) + } + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + + return true } - /** - * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open - * @param {string} cacheName - * @returns {Promise} - */ - async open (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omited when sending CONNECT - cacheName = webidl.converters.DOMString(cacheName) + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' - // 2.1 - if (this.#caches.has(cacheName)) { - // await caches.open('v1') !== await caches.open('v1') + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 - // 2.1.1 - const cache = this.#caches.get(cacheName) + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. - // 2.1.1.1 - return new Cache(kConstruct, cache) - } + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) - // 2.2 - const cache = [] + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } - // 2.3 - this.#caches.set(cacheName, cache) + let contentLength = util.bodyLength(body) - // 2.4 - return new Cache(kConstruct, cache) + if (contentLength == null) { + contentLength = request.contentLength } - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete - * @param {string} cacheName - * @returns {Promise} - */ - async delete (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. - cacheName = webidl.converters.DOMString(cacheName) + contentLength = null + } - return this.#caches.delete(cacheName) + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. 
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) } - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys - * @returns {string[]} - */ - async keys () { - webidl.brandCheck(this, CacheStorage) + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } - // 2.1 - const keys = this.#caches.keys() + session.ref() - // 2.2 - return [...keys] + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() } -} -Object.defineProperties(CacheStorage.prototype, { - [Symbol.toStringTag]: { - value: 'CacheStorage', - configurable: true - }, - match: kEnumerableProperty, - has: kEnumerableProperty, - open: kEnumerableProperty, - delete: kEnumerableProperty, - keys: kEnumerableProperty -}) + // Increment counter as we have new several streams open + ++h2State.openStreams -module.exports = { - CacheStorage -} + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) + stream.once('end', () => { + request.onComplete([]) + }) -/***/ }), + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) -/***/ 9174: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) -"use strict"; + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) -module.exports = { - kConstruct: (__nccwpck_require__(2785).kConstruct) -} + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) -/***/ }), + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) -/***/ 2396: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) -"use strict"; + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + return true -const assert = __nccwpck_require__(9491) -const { URLSerializer } = __nccwpck_require__(685) -const { isValidHeaderName } = __nccwpck_require__(2538) + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + 
assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) + } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: client[kSocket] + }) + } else { + assert(false) + } + } +} -/** - * @see https://url.spec.whatwg.org/#concept-url-equals - * @param {URL} A - * @param {URL} B - * @param {boolean | undefined} excludeFragment - * @returns {boolean} - */ -function urlEquals (A, B, excludeFragment = false) { - const serializedA = URLSerializer(A, excludeFragment) +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') - const serializedB = URLSerializer(B, excludeFragment) + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() + } + } + ) - return serializedA === serializedB -} + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) -/** - * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 - * @param {string} header - */ -function fieldValues (header) { - assert(header !== null) + function onPipeData (chunk) { + request.onBodySent(chunk) + } - const values = [] + return + } - for (let value of header.split(',')) { - value = value.trim() + let finished = false - if (!value.length) { - continue - } else if (!isValidHeaderName(value)) { - continue + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return } - values.push(value) + try { + if (!writer.write(chunk) && this.pause) { + this.pause() + } + } catch (err) { + util.destroy(this, err) + } } + const onDrain = function () { + if (finished) { + return + } - return values -} + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return + } -module.exports = { - urlEquals, - fieldValues -} + finished = true + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) -/***/ }), + socket + .off('drain', onDrain) + .off('error', onFinished) -/***/ 3598: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + body + .removeListener('data', onData) + .removeListener('end', onFinished) + 
.removeListener('error', onFinished) + .removeListener('close', onAbort) -"use strict"; -// @ts-check + if (!err) { + try { + writer.end() + } catch (er) { + err = er + } + } + writer.destroy(err) + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) + } + } -/* global WebAssembly */ + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) -const assert = __nccwpck_require__(9491) -const net = __nccwpck_require__(1808) -const http = __nccwpck_require__(3685) -const { pipeline } = __nccwpck_require__(2781) -const util = __nccwpck_require__(3983) -const timers = __nccwpck_require__(9459) -const Request = __nccwpck_require__(2905) -const DispatcherBase = __nccwpck_require__(4839) -const { - RequestContentLengthMismatchError, - ResponseContentLengthMismatchError, - InvalidArgumentError, - RequestAbortedError, - HeadersTimeoutError, - HeadersOverflowError, - SocketError, - InformationalError, - BodyTimeoutError, - HTTPParserError, - ResponseExceededMaxSizeError, - ClientDestroyedError -} = __nccwpck_require__(8045) -const buildConnector = __nccwpck_require__(2067) -const { - kUrl, - kReset, - kServerName, - kClient, - kBusy, - kParser, - kConnect, - kBlocking, - kResuming, - kRunning, - kPending, - kSize, - kWriting, - kQueue, - kConnected, - kConnecting, - kNeedDrain, - kNoRef, - kKeepAliveDefaultTimeout, - kHostHeader, - kPendingIdx, - kRunningIdx, - kError, - kPipelining, - kSocket, - kKeepAliveTimeoutValue, - kMaxHeadersSize, - kKeepAliveMaxTimeout, - kKeepAliveTimeoutThreshold, - kHeadersTimeout, - kBodyTimeout, - kStrictContentLength, - kConnector, - kMaxRedirections, - kMaxRequests, - kCounter, - kClose, - kDestroy, - kDispatch, - kInterceptors, - kLocalAddress, - kMaxResponseSize, - kHTTPConnVersion, - // HTTP2 - kHost, - kHTTP2Session, - kHTTP2SessionState, - kHTTP2BuildRequest, - kHTTP2CopyHeaders, - kHTTP1BuildRequest -} = __nccwpck_require__(2785) + if (body.resume) { + body.resume() + } -/** @type {import('http2')} */ -let http2 -try { - http2 = __nccwpck_require__(5158) -} catch { - // @ts-ignore - http2 = { constants: {} } + socket + .on('drain', onDrain) + .on('error', onFinished) } -const { - constants: { - HTTP2_HEADER_AUTHORITY, - HTTP2_HEADER_METHOD, - HTTP2_HEADER_PATH, - HTTP2_HEADER_SCHEME, - HTTP2_HEADER_CONTENT_LENGTH, - HTTP2_HEADER_EXPECT, - HTTP2_HEADER_STATUS - } -} = http2 +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') -// Experimental -let h2ExperimentalWarned = false + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } -const FastBuffer = Buffer[Symbol.species] + const buffer = Buffer.from(await body.arrayBuffer()) -const kClosedResolve = Symbol('kClosedResolve') + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() + } else { + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() + } -const channels = {} + request.onBodySent(buffer) + request.onRequestSent() -try { - const diagnosticsChannel = __nccwpck_require__(7643) - channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') - channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') - 
channels.connectError = diagnosticsChannel.channel('undici:client:connectError') - channels.connected = diagnosticsChannel.channel('undici:client:connected') -} catch { - channels.sendHeaders = { hasSubscribers: false } - channels.beforeConnect = { hasSubscribers: false } - channels.connectError = { hasSubscribers: false } - channels.connected = { hasSubscribers: false } + if (!expectsPayload) { + socket[kReset] = true + } + + resume(client) + } catch (err) { + util.destroy(isH2 ? h2stream : socket, err) + } } -/** - * @type {import('../types/client').default} - */ -class Client extends DispatcherBase { - /** - * - * @param {string|URL} url - * @param {import('../types/client').Client.Options} options - */ - constructor (url, { - interceptors, - maxHeaderSize, - headersTimeout, - socketTimeout, - requestTimeout, - connectTimeout, - bodyTimeout, - idleTimeout, - keepAlive, - keepAliveTimeout, - maxKeepAliveTimeout, - keepAliveMaxTimeout, - keepAliveTimeoutThreshold, - socketPath, - pipelining, - tls, - strictContentLength, - maxCachedSessions, - maxRedirections, - connect, - maxRequestsPerClient, - localAddress, - maxResponseSize, - autoSelectFamily, - autoSelectFamilyAttemptTimeout, - // h2 - allowH2, - maxConcurrentStreams - } = {}) { - super() +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') - if (keepAlive !== undefined) { - throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() } + } - if (socketTimeout !== undefined) { - throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') - } + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) - if (requestTimeout !== undefined) { - throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve } + }) - if (idleTimeout !== undefined) { - throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') - } + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) - if (maxKeepAliveTimeout !== undefined) { - throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') - } + try { + // It's up to the user to somehow abort the async iterable. 
+ for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } - if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { - throw new InvalidArgumentError('invalid maxHeaderSize') + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() + } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) } - if (socketPath != null && typeof socketPath !== 'string') { - throw new InvalidArgumentError('invalid socketPath') - } + return + } - if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { - throw new InvalidArgumentError('invalid connectTimeout') - } + socket + .on('close', onDrain) + .on('drain', onDrain) - if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { - throw new InvalidArgumentError('invalid keepAliveTimeout') - } + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } - if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { - throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + if (!writer.write(chunk)) { + await waitForDrain() + } } - if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { - throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') - } + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) + } +} - if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { - throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') - } +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header - if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { - throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') - } + socket[kWriting] = true + } - if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { - throw new InvalidArgumentError('connect must be a function or an object') - } + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this - if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { - throw new InvalidArgumentError('maxRedirections must be a positive number') + if (socket[kError]) { + throw socket[kError] } - if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { - throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + if (socket.destroyed) { + return false } - if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { - throw new InvalidArgumentError('localAddress must be valid string IP address') + const len = Buffer.byteLength(chunk) + if (!len) { + return true } - if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { 
- throw new InvalidArgumentError('maxResponseSize must be a positive number') - } + // We should defer writing chunks. + if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } - if ( - autoSelectFamilyAttemptTimeout != null && - (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) - ) { - throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + process.emitWarning(new RequestContentLengthMismatchError()) } - // h2 - if (allowH2 != null && typeof allowH2 !== 'boolean') { - throw new InvalidArgumentError('allowH2 must be a valid boolean value') - } + socket.cork() - if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { - throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') - } + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } - if (typeof connect !== 'function') { - connect = buildConnector({ - ...tls, - maxCachedSessions, - allowH2, - socketPath, - timeout: connectTimeout, - ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), - ...connect - }) + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } } - this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) - ? interceptors.Client - : [createRedirectInterceptor({ maxRedirections })] - this[kUrl] = util.parseOrigin(url) - this[kConnector] = connect - this[kSocket] = null - this[kPipelining] = pipelining != null ? pipelining : 1 - this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize - this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout - this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout - this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold - this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] - this[kServerName] = null - this[kLocalAddress] = localAddress != null ? localAddress : null - this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming - this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming - this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` - this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 - this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 - this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength - this[kMaxRedirections] = maxRedirections - this[kMaxRequests] = maxRequestsPerClient - this[kClosedResolve] = null - this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 - this[kHTTPConnVersion] = 'h1' - - // HTTP/2 - this[kHTTP2Session] = null - this[kHTTP2SessionState] = !allowH2 - ? null - : { - // streams: null, // Fixed queue of streams - For future support of `push` - openStreams: 0, // Keep track of them to decide wether or not unref the session - maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server - } - this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? 
`:${this[kUrl].port}` : ''}` - - // kQueue is built up of 3 sections separated by - // the kRunningIdx and kPendingIdx indices. - // | complete | running | pending | - // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length - // kRunningIdx points to the first running element. - // kPendingIdx points to the first pending element. - // This implements a fast queue with an amortized - // time of O(1). - - this[kQueue] = [] - this[kRunningIdx] = 0 - this[kPendingIdx] = 0 - } - - get pipelining () { - return this[kPipelining] - } - - set pipelining (value) { - this[kPipelining] = value - resume(this, true) - } + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } - get [kPending] () { - return this[kQueue].length - this[kPendingIdx] - } + this.bytesWritten += len - get [kRunning] () { - return this[kPendingIdx] - this[kRunningIdx] - } + const ret = socket.write(chunk) - get [kSize] () { - return this[kQueue].length - this[kRunningIdx] - } + socket.uncork() - get [kConnected] () { - return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed - } + request.onBodySent(chunk) - get [kBusy] () { - const socket = this[kSocket] - return ( - (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || - (this[kSize] >= (this[kPipelining] || 1)) || - this[kPending] > 0 - ) - } + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + } - /* istanbul ignore: only used for test */ - [kConnect] (cb) { - connect(this) - this.once('connect', cb) + return ret } - [kDispatch] (opts, handler) { - const origin = opts.origin || this[kUrl].origin + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() - const request = this[kHTTPConnVersion] === 'h2' - ? Request[kHTTP2BuildRequest](origin, opts, handler) - : Request[kHTTP1BuildRequest](origin, opts, handler) + socket[kWriting] = false - this[kQueue].push(request) - if (this[kResuming]) { - // Do nothing. - } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { - // Wait a tick in case stream/iterator is ended in the same tick. - this[kResuming] = 1 - process.nextTick(resume, this) - } else { - resume(this, true) + if (socket[kError]) { + throw socket[kError] } - if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { - this[kNeedDrain] = 2 + if (socket.destroyed) { + return } - return this[kNeedDrain] < 2 - } + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. - async [kClose] () { - // TODO: for H2 we need to gracefully flush the remaining enqueued - // request and close each stream. 
- return new Promise((resolve) => { - if (!this[kSize]) { - resolve(null) + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') } else { - this[kClosedResolve] = resolve - } - }) - } - - async [kDestroy] (err) { - return new Promise((resolve) => { - const requests = this[kQueue].splice(this[kPendingIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(this, request, err) - } - - const callback = () => { - if (this[kClosedResolve]) { - // TODO (fix): Should we error here with ClientDestroyedError? - this[kClosedResolve]() - this[kClosedResolve] = null - } - resolve() + socket.write(`${header}\r\n`, 'latin1') } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } - if (this[kHTTP2Session] != null) { - util.destroy(this[kHTTP2Session], err) - this[kHTTP2Session] = null - this[kHTTP2SessionState] = null + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) } + } - if (!this[kSocket]) { - queueMicrotask(callback) - } else { - util.destroy(this[kSocket].on('close', callback), err) + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() } + } - resume(this) - }) + resume(client) } -} - -function onHttp2SessionError (err) { - assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - - this[kSocket][kError] = err - onError(this[kClient], err) -} + destroy (err) { + const { socket, client } = this -function onHttp2FrameError (type, code, id) { - const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + socket[kWriting] = false - if (id === 0) { - this[kSocket][kError] = err - onError(this[kClient], err) + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } } } -function onHttp2SessionEnd () { - util.destroy(this, new SocketError('other side closed')) - util.destroy(this[kSocket], new SocketError('other side closed')) +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) + } } -function onHTTP2GoAway (code) { - const client = this[kClient] - const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) - client[kSocket] = null - client[kHTTP2Session] = null - - if (client.destroyed) { - assert(this[kPending] === 0) - - // Fail entire queue. - const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(this, request, err) - } - } else if (client[kRunning] > 0) { - // Fail head of pipeline. 
- const request = client[kQueue][client[kRunningIdx]] - client[kQueue][client[kRunningIdx]++] = null - - errorRequest(client, request, err) - } +module.exports = Client - client[kPendingIdx] = client[kRunningIdx] - assert(client[kRunning] === 0) +/***/ }), - client.emit('disconnect', - client[kUrl], - [client], - err - ) +/***/ 56436: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - resume(client) -} +"use strict"; -const constants = __nccwpck_require__(953) -const createRedirectInterceptor = __nccwpck_require__(8861) -const EMPTY_BUF = Buffer.alloc(0) -async function lazyllhttp () { - const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined +/* istanbul ignore file: only for Node 12 */ - let mod - try { - mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64')) - } catch (e) { - /* istanbul ignore next */ +const { kConnected, kSize } = __nccwpck_require__(72785) - // We could check if the error was caused by the simd option not - // being enabled, but the occurring of this other error - // * https://github.com/emscripten-core/emscripten/issues/11495 - // got me to remove that check to avoid breaking Node 12. - mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64')) +class CompatWeakRef { + constructor (value) { + this.value = value } - return await WebAssembly.instantiate(mod, { - env: { - /* eslint-disable camelcase */ + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? undefined + : this.value + } +} - wasm_on_url: (p, at, len) => { - /* istanbul ignore next */ - return 0 - }, - wasm_on_status: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_message_begin: (p) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onMessageBegin() || 0 - }, - wasm_on_header_field: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_header_value: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 - }, - wasm_on_body: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_message_complete: (p) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onMessageComplete() || 0 - } +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer + } - /* eslint-enable camelcase */ + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) } - }) + } } -let llhttpInstance = null -let llhttpPromise = lazyllhttp() -llhttpPromise.catch() +module.exports = 
function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer + } + } + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + } +} -let currentParser = null -let currentBufferRef = null -let currentBufferSize = 0 -let currentBufferPtr = null -const TIMEOUT_HEADERS = 1 -const TIMEOUT_BODY = 2 -const TIMEOUT_IDLE = 3 +/***/ }), -class Parser { - constructor (client, socket, { exports }) { - assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) +/***/ 20663: +/***/ ((module) => { - this.llhttp = exports - this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) - this.client = client - this.socket = socket - this.timeout = null - this.timeoutValue = null - this.timeoutType = null - this.statusCode = null - this.statusText = '' - this.upgrade = false - this.headers = [] - this.headersSize = 0 - this.headersMaxSize = client[kMaxHeadersSize] - this.shouldKeepAlive = false - this.paused = false - this.resume = this.resume.bind(this) +"use strict"; - this.bytesRead = 0 - this.keepAlive = '' - this.contentLength = '' - this.connection = '' - this.maxResponseSize = client[kMaxResponseSize] - } +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 - setTimeout (value, type) { - this.timeoutType = type - if (value !== this.timeoutValue) { - timers.clearTimeout(this.timeout) - if (value) { - this.timeout = timers.setTimeout(onParserTimeout, value, this) - // istanbul ignore else: only for jest - if (this.timeout.unref) { - this.timeout.unref() - } - } else { - this.timeout = null - } - this.timeoutValue = value - } else if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } - } +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 - resume () { - if (this.socket.destroyed || !this.paused) { - return - } +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize +} - assert(this.ptr != null) - assert(currentParser == null) - this.llhttp.llhttp_resume(this.ptr) +/***/ }), - assert(this.timeoutType === TIMEOUT_BODY) - if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } +/***/ 41724: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.paused = false - this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. 
- this.readMore() - } +"use strict"; - readMore () { - while (!this.paused && this.ptr) { - const chunk = this.socket.read() - if (chunk === null) { - break - } - this.execute(chunk) - } - } - execute (data) { - assert(this.ptr != null) - assert(currentParser == null) - assert(!this.paused) +const { parseSetCookie } = __nccwpck_require__(24408) +const { stringify, getHeadersList } = __nccwpck_require__(43121) +const { webidl } = __nccwpck_require__(21744) +const { Headers } = __nccwpck_require__(10554) - const { socket, llhttp } = this +/** + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed + */ - if (data.length > currentBufferSize) { - if (currentBufferPtr) { - llhttp.free(currentBufferPtr) - } - currentBufferSize = Math.ceil(data.length / 4096) * 4096 - currentBufferPtr = llhttp.malloc(currentBufferSize) - } +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) - new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) + webidl.brandCheck(headers, Headers, { strict: false }) - // Call `execute` on the wasm parser. - // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, - // and finally the length of bytes to parse. - // The return value is an error code or `constants.ERROR.OK`. - try { - let ret + const cookie = headers.get('cookie') + const out = {} - try { - currentBufferRef = data - currentParser = this - ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) - /* eslint-disable-next-line no-useless-catch */ - } catch (err) { - /* istanbul ignore next: difficult to make a test case for */ - throw err - } finally { - currentParser = null - currentBufferRef = null - } + if (!cookie) { + return out + } - const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') - if (ret === constants.ERROR.PAUSED_UPGRADE) { - this.onUpgrade(data.slice(offset)) - } else if (ret === constants.ERROR.PAUSED) { - this.paused = true - socket.unshift(data.slice(offset)) - } else if (ret !== constants.ERROR.OK) { - const ptr = llhttp.llhttp_get_error_reason(this.ptr) - let message = '' - /* istanbul ignore else: difficult to make a test case for */ - if (ptr) { - const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) - message = - 'Response does not match the HTTP/1.1 protocol (' + - Buffer.from(llhttp.memory.buffer, ptr, len).toString() + - ')' - } - throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) - } - } catch (err) { - util.destroy(socket, err) - } + out[name.trim()] = value.join('=') } - destroy () { - assert(this.ptr != null) - assert(currentParser == null) + return out +} - this.llhttp.llhttp_free(this.ptr) - this.ptr = null +/** + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} + */ +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) - 
timers.clearTimeout(this.timeout) - this.timeout = null - this.timeoutValue = null - this.timeoutType = null + webidl.brandCheck(headers, Headers, { strict: false }) - this.paused = false - } + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) - onStatus (buf) { - this.statusText = buf.toString() - } + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} - onMessageBegin () { - const { socket, client } = this +/** + * @param {Headers} headers + * @returns {Cookie[]} + */ +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) - /* istanbul ignore next: difficult to make a test case for */ - if (socket.destroyed) { - return -1 - } + webidl.brandCheck(headers, Headers, { strict: false }) - const request = client[kQueue][client[kRunningIdx]] - if (!request) { - return -1 - } + const cookies = getHeadersList(headers).cookies + + if (!cookies) { + return [] } - onHeaderField (buf) { - const len = this.headers.length + // In older versions of undici, cookies is a list of name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) +} - if ((len & 1) === 0) { - this.headers.push(buf) - } else { - this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) - } +/** + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} + */ +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) - this.trackHeader(buf.length) - } + webidl.brandCheck(headers, Headers, { strict: false }) - onHeaderValue (buf) { - let len = this.headers.length + cookie = webidl.converters.Cookie(cookie) - if ((len & 1) === 1) { - this.headers.push(buf) - len += 1 - } else { - this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) - } + const str = stringify(cookie) - const key = this.headers[len - 2] - if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { - this.keepAlive += buf.toString() - } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { - this.connection += buf.toString() - } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { - this.contentLength += buf.toString() - } + if (str) { + headers.append('Set-Cookie', stringify(cookie)) + } +} - this.trackHeader(buf.length) +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null } +]) - trackHeader (len) { - this.headersSize += len - if (this.headersSize >= this.headersMaxSize) { - util.destroy(this.socket, new HeadersOverflowError()) - } +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } + + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + 
converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] } +]) - onUpgrade (head) { - const { upgrade, client, socket, headers, statusCode } = this +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} - assert(upgrade) - const request = client[kQueue][client[kRunningIdx]] - assert(request) +/***/ }), - assert(!socket.destroyed) - assert(socket === client[kSocket]) - assert(!this.paused) - assert(request.upgrade || request.method === 'CONNECT') +/***/ 24408: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.statusCode = null - this.statusText = '' - this.shouldKeepAlive = null +"use strict"; - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 - socket.unshift(head) +const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(20663) +const { isCTLExcludingHtab } = __nccwpck_require__(43121) +const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685) +const assert = __nccwpck_require__(39491) - socket[kParser].destroy() - socket[kParser] = null +/** + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned + */ +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } - socket[kClient] = null - socket[kError] = null - socket - .removeListener('error', onSocketError) - .removeListener('readable', onSocketReadable) - .removeListener('end', onSocketEnd) - .removeListener('close', onSocketClose) + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' - client[kSocket] = null - client[kQueue][client[kRunningIdx]++] = null - client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). + const position = { position: 0 } - try { - request.onUpgrade(statusCode, headers, socket) - } catch (err) { - util.destroy(socket, err) - } + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: - resume(client) + // 1. The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. 
+ nameValuePair = header } - onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { - const { client, socket, headers, statusText } = this - - /* istanbul ignore next: difficult to make a test case for */ - if (socket.destroyed) { - return -1 - } + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } - const request = client[kQueue][client[kRunningIdx]] + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() - /* istanbul ignore next: difficult to make a test case for */ - if (!request) { - return -1 - } + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } - assert(!this.upgrade) - assert(this.statusCode < 200) + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} - if (statusCode === 100) { - util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) - return -1 - } +/** + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList + */ +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. + if (unparsedAttributes.length === 0) { + return cookieAttributeList + } - /* this can only happen if server is misbehaving */ - if (upgrade && !request.upgrade) { - util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) - return -1 - } + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) - assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) + let cookieAv = '' - this.statusCode = statusCode - this.shouldKeepAlive = ( - shouldKeepAlive || - // Override llhttp value which does not allow keepAlive for HEAD. - (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: - if (this.statusCode >= 200) { - const bodyTimeout = request.bodyTimeout != null - ? 
request.bodyTimeout - : client[kBodyTimeout] - this.setTimeout(bodyTimeout, TIMEOUT_BODY) - } else if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' + } - if (request.method === 'CONNECT') { - assert(client[kRunning] === 1) - this.upgrade = true - return 2 - } + // Let the cookie-av string be the characters consumed in this step. - if (upgrade) { - assert(client[kRunning] === 1) - this.upgrade = true - return 2 - } + let attributeName = '' + let attributeValue = '' - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. + const position = { position: 0 } - if (this.shouldKeepAlive && client[kPipelining]) { - const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: - if (keepAliveTimeout != null) { - const timeout = Math.min( - keepAliveTimeout - client[kKeepAliveTimeoutThreshold], - client[kKeepAliveMaxTimeout] - ) - if (timeout <= 0) { - socket[kReset] = true - } else { - client[kKeepAliveTimeoutValue] = timeout - } - } else { - client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] - } - } else { - // Stop more requests from being dispatched. - socket[kReset] = true - } + // 1. The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } - const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() - if (request.aborted) { - return -1 - } + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } - if (request.method === 'HEAD') { - return 1 - } + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. 
+ + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. - if (statusCode < 200) { - return 1 + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } - if (socket[kBlocking]) { - socket[kBlocking] = false - resume(client) + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } - return pause ? constants.ERROR.PAUSED : 0 - } + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) - onBody (buf) { - const { client, socket, statusCode, maxResponseSize } = this + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). - if (socket.destroyed) { - return -1 - } + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) - const request = client[kQueue][client[kRunningIdx]] - assert(request) + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds - assert.strictEqual(this.timeoutType, TIMEOUT_BODY) - if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } + // 7. Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. + cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. - assert(statusCode >= 200) + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue - if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { - util.destroy(socket, new ResponseExceededMaxSizeError()) - return -1 + // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) } - this.bytesRead += buf.length - - if (request.onData(buf) === false) { - return constants.ERROR.PAUSED - } - } + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() - onMessageComplete () { - const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. 
+ cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. - if (socket.destroyed && (!statusCode || shouldKeepAlive)) { - return -1 - } + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: - if (upgrade) { - return + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue } - const request = client[kQueue][client[kRunningIdx]] - assert(request) + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. - assert(statusCode >= 100) + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. - this.statusCode = null - this.statusText = '' - this.bytesRead = 0 - this.contentLength = '' - this.keepAlive = '' - this.connection = '' + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 + // 1. Let enforcement be "Default". + let enforcement = 'Default' - if (statusCode < 200) { - return + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' } - /* istanbul ignore next: should be handled by llhttp? */ - if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { - util.destroy(socket, new ResponseContentLengthMismatchError()) - return -1 + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' } - request.onComplete(headers) - - client[kQueue][client[kRunningIdx]++] = null - - if (socket[kWriting]) { - assert.strictEqual(client[kRunning], 0) - // Response completed before request. 
- util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (!shouldKeepAlive) { - util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (socket[kReset] && client[kRunning] === 0) { - // Destroy socket once all requests have completed. - // The request at the tail of the pipeline is the one - // that requested reset and no further requests should - // have been queued since then. - util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (client[kPipelining] === 1) { - // We must wait a full event loop cycle to reuse this socket to make sure - // that non-spec compliant servers are not closing the connection even if they - // said they won't. - setImmediate(resume, client) - } else { - resume(client) + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". + if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' } - } -} -function onParserTimeout (parser) { - const { socket, timeoutType, client } = parser + // 5. Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. + cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] - /* istanbul ignore else */ - if (timeoutType === TIMEOUT_HEADERS) { - if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser.paused, 'cannot be paused while waiting for headers') - util.destroy(socket, new HeadersTimeoutError()) - } - } else if (timeoutType === TIMEOUT_BODY) { - if (!parser.paused) { - util.destroy(socket, new BodyTimeoutError()) - } - } else if (timeoutType === TIMEOUT_IDLE) { - assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) - util.destroy(socket, new InformationalError('socket idle timeout')) + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) } + + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } -function onSocketReadable () { - const { [kParser]: parser } = this - if (parser) { - parser.readMore() - } +module.exports = { + parseSetCookie, + parseUnparsedAttributes } -function onSocketError (err) { - const { [kClient]: client, [kParser]: parser } = this - assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') +/***/ }), - if (client[kHTTPConnVersion] !== 'h2') { - // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded - // to the user. - if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so for as a valid response. - parser.onMessageComplete() - return - } - } +/***/ 43121: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this[kError] = err +"use strict"; - onError(this[kClient], err) -} -function onError (client, err) { - if ( - client[kRunning] === 0 && - err.code !== 'UND_ERR_INFO' && - err.code !== 'UND_ERR_SOCKET' - ) { - // Error is not caused by running request and not a recoverable - // socket error. 
+const assert = __nccwpck_require__(39491) +const { kHeadersList } = __nccwpck_require__(72785) - assert(client[kPendingIdx] === client[kRunningIdx]) +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } - const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(client, request, err) + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false } - assert(client[kSize] === 0) } } -function onSocketEnd () { - const { [kParser]: parser, [kClient]: client } = this +/** + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name + */ +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) - if (client[kHTTPConnVersion] !== 'h2') { - if (parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so far as a valid response. - parser.onMessageComplete() - return + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') } } +} - util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +/** + cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value + */ +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } } -function onSocketClose () { - const { [kClient]: client, [kParser]: parser } = this +/** + * path-value = + * @param {string} path + */ +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) - if (client[kHTTPConnVersion] === 'h1' && parser) { - if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so far as a valid response. - parser.onMessageComplete() + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') } + } +} - this[kParser].destroy() - this[kParser] = null +/** + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. 
- Khafra + * @param {string} domain + */ +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') } +} - const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] - client[kSocket] = null + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 - if (client.destroyed) { - assert(client[kPending] === 0) + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT - // Fail entire queue. - const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(client, request, err) - } - } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { - // Fail head of pipeline. - const request = client[kQueue][client[kRunningIdx]] - client[kQueue][client[kRunningIdx]++] = null + GMT = %x47.4D.54 ; "GMT", case-sensitive - errorRequest(client, request, err) + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) + + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) } - client[kPendingIdx] = client[kRunningIdx] + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] - assert(client[kRunning] === 0) + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] - client.emit('disconnect', client[kUrl], [client], err) + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') - resume(client) + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` } -async function connect (client) { - assert(!client[kConnecting]) - assert(!client[kSocket]) +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. 
+ * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') + } +} - let { host, hostname, protocol, port } = client[kUrl] +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null + } - // Resolve ipv6 - if (hostname[0] === '[') { - const idx = hostname.indexOf(']') + validateCookieName(cookie.name) + validateCookieValue(cookie.value) - assert(idx !== -1) - const ip = hostname.substring(1, idx) + const out = [`${cookie.name}=${cookie.value}`] - assert(net.isIP(ip)) - hostname = ip + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true } - client[kConnecting] = true - - if (channels.beforeConnect.hasSubscribers) { - channels.beforeConnect.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, - connector: client[kConnector] - }) + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' } - try { - const socket = await new Promise((resolve, reject) => { - client[kConnector]({ - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, (err, socket) => { - if (err) { - reject(err) - } else { - resolve(socket) - } - }) - }) - - if (client.destroyed) { - util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) - return - } + if (cookie.secure) { + out.push('Secure') + } - client[kConnecting] = false + if (cookie.httpOnly) { + out.push('HttpOnly') + } - assert(socket) + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } - const isH2 = socket.alpnProtocol === 'h2' - if (isH2) { - if (!h2ExperimentalWarned) { - h2ExperimentalWarned = true - process.emitWarning('H2 support is experimental, expect them to change at any time.', { - code: 'UNDICI-H2' - }) - } + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } - const session = http2.connect(client[kUrl], { - createConnection: () => socket, - peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams - }) + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } - client[kHTTPConnVersion] = 'h2' - session[kClient] = client - session[kSocket] = socket - session.on('error', onHttp2SessionError) - session.on('frameError', onHttp2FrameError) - session.on('end', onHttp2SessionEnd) - session.on('goaway', onHTTP2GoAway) - session.on('close', onSocketClose) - session.unref() + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } - client[kHTTP2Session] = session - socket[kHTTP2Session] = session - } else { - if (!llhttpInstance) { - llhttpInstance = await llhttpPromise - llhttpPromise = null - } + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) + } - socket[kNoRef] = false - socket[kWriting] = false - socket[kReset] = false - socket[kBlocking] = false - socket[kParser] = new Parser(client, socket, llhttpInstance) + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + 
throw new Error('Invalid unparsed') } - socket[kCounter] = 0 - socket[kMaxRequests] = client[kMaxRequests] - socket[kClient] = client - socket[kError] = null - - socket - .on('error', onSocketError) - .on('readable', onSocketReadable) - .on('end', onSocketEnd) - .on('close', onSocketClose) + const [key, ...value] = part.split('=') - client[kSocket] = socket + out.push(`${key.trim()}=${value.join('=')}`) + } - if (channels.connected.hasSubscribers) { - channels.connected.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, - connector: client[kConnector], - socket - }) - } - client.emit('connect', client[kUrl], [client]) - } catch (err) { - if (client.destroyed) { - return - } + return out.join('; ') +} - client[kConnecting] = false +let kHeadersListNode - if (channels.connectError.hasSubscribers) { - channels.connectError.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, - connector: client[kConnector], - error: err - }) - } +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] + } - if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { - assert(client[kRunning] === 0) - while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { - const request = client[kQueue][client[kPendingIdx]++] - errorRequest(client, request, err) - } - } else { - onError(client, err) - } + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) - client.emit('connectionError', client[kUrl], [client], err) + assert(kHeadersListNode, 'Headers cannot be parsed') } - resume(client) + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList } -function emitDrain (client) { - client[kNeedDrain] = 0 - client.emit('drain', client[kUrl], [client]) +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList } -function resume (client, sync) { - if (client[kResuming] === 2) { - return - } - client[kResuming] = 2 +/***/ }), - _resume(client, sync) - client[kResuming] = 0 +/***/ 82067: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (client[kRunningIdx] > 256) { - client[kQueue].splice(0, client[kRunningIdx]) - client[kPendingIdx] -= client[kRunningIdx] - client[kRunningIdx] = 0 - } -} +"use strict"; -function _resume (client, sync) { - while (true) { - if (client.destroyed) { - assert(client[kPending] === 0) - return - } - if (client[kClosedResolve] && !client[kSize]) { - client[kClosedResolve]() - client[kClosedResolve] = null - return - } +const net = __nccwpck_require__(41808) +const assert = __nccwpck_require__(39491) +const util = __nccwpck_require__(83983) +const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(48045) - const socket = client[kSocket] +let tls // include tls conditionally since it is not always available - if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { - if (client[kSize] === 0) { - if (!socket[kNoRef] && socket.unref) { - socket.unref() - socket[kNoRef] = true - } - } else if (socket[kNoRef] && socket.ref) { - socket.ref() - socket[kNoRef] = false - } +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is 
enabled. - if (client[kSize] === 0) { - if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { - socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return } - } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { - if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { - const request = client[kQueue][client[kRunningIdx]] - const headersTimeout = request.headersTimeout != null - ? request.headersTimeout - : client[kHeadersTimeout] - socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) } - } + }) } - if (client[kBusy]) { - client[kNeedDrain] = 2 - } else if (client[kNeedDrain] === 2) { - if (sync) { - client[kNeedDrain] = 1 - process.nextTick(emitDrain, client) - } else { - emitDrain(client) - } - continue + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null } - if (client[kPending] === 0) { - return - } + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } - if (client[kRunning] >= (client[kPipelining] || 1)) { - return + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() } - const request = client[kQueue][client[kPendingIdx]] + get (sessionKey) { + return this._sessionCache.get(sessionKey) + } - if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { - if (client[kRunning] > 0) { + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { return } - client[kServerName] = request.servername - - if (socket && socket.servername !== request.servername) { - util.destroy(socket, new InformationalError('servername changed')) - return + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) } - } - if (client[kConnecting]) { - return + this._sessionCache.set(sessionKey, session) } + } +} - if (!socket && !client[kHTTP2Session]) { - connect(client) - return - } +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } - if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { - return - } + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? 
allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = __nccwpck_require__(24404) + } + servername = servername || options.servername || util.getServerName(host) || null - if (client[kRunning] > 0 && !request.idempotent) { - // Non-idempotent request cannot be retried. - // Ensure that no other requests are inflight and - // could cause failure. - return - } + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null - if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { - // Don't dispatch an upgrade until all preceding requests have completed. - // A misbehaving server might upgrade the connection before all pipelined - // request has completed. - return - } + assert(sessionKey) - if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && - (util.isStream(request.body) || util.isAsyncIterable(request.body))) { - // Request with stream or iterator body can error while other requests - // are inflight and indirectly error those as well. - // Ensure this doesn't happen by waiting for inflight - // to complete before dispatching. + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) - // Request with stream or iterator body cannot be retried. - // Ensure that no other requests are inflight and - // could cause failure. - return + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? + sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) } - if (!request.aborted && write(client, request)) { - client[kPendingIdx]++ - } else { - client[kQueue].splice(client[kPendingIdx], 1) + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) } + + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 
'secureConnect' : 'connect', function () { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) + + return socket } } -// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 -function shouldSendContentLength (method) { - return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' -} +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } -function write (client, request) { - if (client[kHTTPConnVersion] === 'h2') { - writeH2(client, client[kHTTP2Session], request) - return + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) } +} - const { body, method, path, host, upgrade, headers, blocking, reset } = request +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} - // https://tools.ietf.org/html/rfc7231#section-4.3.1 - // https://tools.ietf.org/html/rfc7231#section-4.3.2 - // https://tools.ietf.org/html/rfc7231#section-4.3.5 +module.exports = buildConnector - // Sending a payload body on a request that does not - // expect it can cause undefined behavior on some - // servers and corrupt connection state. Do not - // re-use the connection for further requests. - const expectsPayload = ( - method === 'PUT' || - method === 'POST' || - method === 'PATCH' - ) +/***/ }), - if (body && typeof body.read === 'function') { - // Try to read EOF in order to get length. 
- body.read(0) - } +/***/ 14462: +/***/ ((module) => { - const bodyLength = util.bodyLength(body) +"use strict"; - let contentLength = bodyLength - if (contentLength === null) { - contentLength = request.contentLength - } +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] - if (contentLength === 0 && !expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD NOT send a Content-Length header field when - // the request message does not contain a payload body and the method - // semantics do not anticipate such a body. +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} - contentLength = null - } +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) - // https://github.com/nodejs/undici/issues/2046 - // A user agent may send a Content-Length header with 0 value, this should be allowed. 
- if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { - if (client[kStrictContentLength]) { - errorRequest(client, request, new RequestContentLengthMismatchError()) - return false - } +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} - process.emitWarning(new RequestContentLengthMismatchError()) + +/***/ }), + +/***/ 48045: +/***/ ((module) => { + +"use strict"; + + +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' } +} - const socket = client[kSocket] +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} - try { - request.onConnect((err) => { - if (request.aborted || request.completed) { - return - } +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } +} - errorRequest(client, request, err || new RequestAbortedError()) +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} - util.destroy(socket, new InformationalError('aborted')) - }) - } catch (err) { - errorRequest(client, request, err) +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } +} + +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers } +} - if (request.aborted) { - return false +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' } +} - if (method === 'HEAD') { - // https://github.com/mcollina/undici/issues/258 - // Close after a HEAD request to interop with misbehaving servers - // that may send a body in the response. - - socket[kReset] = true +class InvalidReturnValueError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' } +} - if (upgrade || method === 'CONNECT') { - // On CONNECT or upgrade, block pipeline from dispatching further - // requests on this connection. 
+class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' + } +} - socket[kReset] = true +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' } +} - if (reset != null) { - socket[kReset] = reset +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' } +} - if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { - socket[kReset] = true +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' } +} - if (blocking) { - socket[kBlocking] = true +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' } +} - let header = `${method} ${path} HTTP/1.1\r\n` +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} - if (typeof host === 'string') { - header += `host: ${host}\r\n` - } else { - header += client[kHostHeader] +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket } +} - if (upgrade) { - header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` - } else if (client[kPipelining] && !socket[kReset]) { - header += 'connection: keep-alive\r\n' - } else { - header += 'connection: close\r\n' +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' } +} - if (headers) { - header += headers +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' } +} - if (channels.sendHeaders.hasSubscribers) { - channels.sendHeaders.publish({ request, headers: header, socket }) +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name 
= 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? data.toString() : undefined } +} - /* istanbul ignore else: assertion */ - if (!body || bodyLength === 0) { - if (contentLength === 0) { - socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') - } else { - assert(contentLength === null, 'no body must not have content length') - socket.write(`${header}\r\n`, 'latin1') - } - request.onRequestSent() - } else if (util.isBuffer(body)) { - assert(contentLength === body.byteLength, 'buffer body must have content length') +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} - socket.cork() - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - socket.write(body) - socket.uncork() - request.onBodySent(body) - request.onRequestSent() - if (!expectsPayload) { - socket[kReset] = true - } - } else if (util.isBlobLike(body)) { - if (typeof body.stream === 'function') { - writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) - } else { - writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) - } - } else if (util.isStream(body)) { - writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) - } else if (util.isIterable(body)) { - writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) - } else { - assert(false) +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers } +} - return true +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError } -function writeH2 (client, session, request) { - const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request - let headers - if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) - else headers = reqHeaders +/***/ }), - if (upgrade) { - errorRequest(client, request, new Error('Upgrade not supported for H2')) - return false - } +/***/ 62905: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - try { - // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? 
- request.onConnect((err) => { - if (request.aborted || request.completed) { - return - } +"use strict"; - errorRequest(client, request, err || new RequestAbortedError()) - }) - } catch (err) { - errorRequest(client, request, err) - } - if (request.aborted) { - return false - } +const { + InvalidArgumentError, + NotSupportedError +} = __nccwpck_require__(48045) +const assert = __nccwpck_require__(39491) +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(72785) +const util = __nccwpck_require__(83983) - /** @type {import('node:http2').ClientHttp2Stream} */ - let stream - const h2State = client[kHTTP2SessionState] +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js - headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] - headers[HTTP2_HEADER_METHOD] = method +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ - if (method === 'CONNECT') { - session.ref() - // we are already connected, streams are pending, first request - // will create a new stream. We trigger a request to create the stream and wait until - // `ready` event is triggered - // We disabled endStream to allow the user to write to the stream - stream = session.request(headers, { endStream: false, signal }) +/** + * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text + */ +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - if (stream.id && !stream.pending) { - request.onUpgrade(null, null, stream) - ++h2State.openStreams - } else { - stream.once('ready', () => { - request.onUpgrade(null, null, stream) - ++h2State.openStreams - }) +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ + +const kHandler = Symbol('handler') + +const channels = {} + +let extractBody + +try { + const diagnosticsChannel = __nccwpck_require__(67643) + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} + +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && + !(path.startsWith('http://') || path.startsWith('https://')) && + method !== 'CONNECT' + ) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') } - stream.once('close', () => { - h2State.openStreams -= 1 - // TODO(HTTP/2): unref only if 
current streams count is 0 - if (h2State.openStreams === 0) session.unref() - }) + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } - return true - } + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') + } - // https://tools.ietf.org/html/rfc7540#section-8.3 - // :path and :scheme headers must be omited when sending CONNECT + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } - headers[HTTP2_HEADER_PATH] = path - headers[HTTP2_HEADER_SCHEME] = 'https' + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') + } - // https://tools.ietf.org/html/rfc7231#section-4.3.1 - // https://tools.ietf.org/html/rfc7231#section-4.3.2 - // https://tools.ietf.org/html/rfc7231#section-4.3.5 + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } - // Sending a payload body on a request that does not - // expect it can cause undefined behavior on some - // servers and corrupt connection state. Do not - // re-use the connection for further requests. + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') + } - const expectsPayload = ( - method === 'PUT' || - method === 'POST' || - method === 'PATCH' - ) + this.headersTimeout = headersTimeout - if (body && typeof body.read === 'function') { - // Try to read EOF in order to get length. - body.read(0) - } + this.bodyTimeout = bodyTimeout - let contentLength = util.bodyLength(body) + this.throwOnError = throwOnError === true - if (contentLength == null) { - contentLength = request.contentLength - } + this.method = method - if (contentLength === 0 || !expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD NOT send a Content-Length header field when - // the request message does not contain a payload body and the method - // semantics do not anticipate such a body. + this.abort = null - contentLength = null - } + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body - // https://github.com/nodejs/undici/issues/2046 - // A user agent may send a Content-Length header with 0 value, this should be allowed. - if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { - if (client[kStrictContentLength]) { - errorRequest(client, request, new RequestContentLengthMismatchError()) - return false + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) + } + this.body.on('end', this.endHandler) + } + + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err + } + } + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? 
Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') } - process.emitWarning(new RequestContentLengthMismatchError()) - } + this.completed = false - if (contentLength != null) { - assert(body, 'no body must not have content length') - headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` - } + this.aborted = false - session.ref() + this.upgrade = upgrade || null - const shouldEndStream = method === 'GET' || method === 'HEAD' - if (expectContinue) { - headers[HTTP2_HEADER_EXPECT] = '100-continue' - stream = session.request(headers, { endStream: shouldEndStream, signal }) + this.path = query ? util.buildURL(path, query) : path - stream.once('continue', writeBodyH2) - } else { - stream = session.request(headers, { - endStream: shouldEndStream, - signal - }) - writeBodyH2() - } + this.origin = origin - // Increment counter as we have new several streams open - ++h2State.openStreams + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent - stream.once('response', headers => { - const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + this.blocking = blocking == null ? false : blocking - if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { - stream.pause() - } - }) + this.reset = reset == null ? null : reset - stream.once('end', () => { - request.onComplete([]) - }) + this.host = null - stream.on('data', (chunk) => { - if (request.onData(chunk) === false) { - stream.pause() - } - }) + this.contentLength = null - stream.once('close', () => { - h2State.openStreams -= 1 - // TODO(HTTP/2): unref only if current streams count is 0 - if (h2State.openStreams === 0) { - session.unref() - } - }) + this.contentType = null - stream.once('error', function (err) { - if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { - h2State.streams -= 1 - util.destroy(stream, err) + this.headers = '' + + // Only for H2 + this.expectContinue = expectContinue != null ? 
expectContinue : false + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') } - }) - stream.once('frameError', (type, code) => { - const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) - errorRequest(client, request, err) + if (util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } + + if (!extractBody) { + extractBody = (__nccwpck_require__(41472).extractBody) + } - if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { - h2State.streams -= 1 - util.destroy(stream, err) + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` } - }) - // stream.on('aborted', () => { - // // TODO(HTTP/2): Support aborted - // }) - - // stream.on('timeout', () => { - // // TODO(HTTP/2): Support timeout - // }) + util.validateHandler(handler, method, upgrade) - // stream.on('push', headers => { - // // TODO(HTTP/2): Suppor push - // }) + this.servername = util.getServerName(this.host) - // stream.on('trailers', headers => { - // // TODO(HTTP/2): Support trailers - // }) + this[kHandler] = handler - return true + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } + } - function writeBodyH2 () { - /* istanbul ignore else: assertion */ - if (!body) { - request.onRequestSent() - } else if (util.isBuffer(body)) { - assert(contentLength === body.byteLength, 'buffer body must have content length') - stream.cork() - stream.write(body) - stream.uncork() - stream.end() - request.onBodySent(body) - request.onRequestSent() - } else if (util.isBlobLike(body)) { - if (typeof body.stream === 'function') { - writeIterable({ - client, - request, - contentLength, - h2stream: stream, - expectsPayload, - body: body.stream(), - socket: client[kSocket], - header: '' - }) - } else { - writeBlob({ - body, - client, - request, - contentLength, - expectsPayload, - h2stream: stream, - header: '', - socket: client[kSocket] - }) + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) } - } else if (util.isStream(body)) { - writeStream({ - body, - client, - request, - contentLength, - expectsPayload, - socket: client[kSocket], - h2stream: stream, - header: '' - }) - } else if (util.isIterable(body)) { - writeIterable({ - body, - client, - request, - contentLength, - expectsPayload, - header: '', - h2stream: stream, - socket: client[kSocket] - }) - } else { - assert(false) } } -} -function writeStream ({ h2stream, body, client, request, socket, 
contentLength, header, expectsPayload }) { - assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } - if (client[kHTTPConnVersion] === 'h2') { - // For HTTP/2, is enough to pipe the stream - const pipe = pipeline( - body, - h2stream, - (err) => { - if (err) { - util.destroy(body, err) - util.destroy(h2stream, err) - } else { - request.onRequestSent() - } + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) } - ) + } + } - pipe.on('data', onPipeData) - pipe.once('end', () => { - pipe.removeListener('data', onPipeData) - util.destroy(pipe) - }) + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) - function onPipeData (chunk) { - request.onBodySent(chunk) + if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) } - - return } - let finished = false - - const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) - const onData = function (chunk) { - if (finished) { - return + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) } try { - if (!writer.write(chunk) && this.pause) { - this.pause() - } + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) } catch (err) { - util.destroy(this, err) + this.abort(err) } } - const onDrain = function () { - if (finished) { - return - } - if (body.resume) { - body.resume() + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) + + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false } } - const onAbort = function () { - if (finished) { - return - } - const err = new RequestAbortedError() - queueMicrotask(() => onFinished(err)) + + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) + + return this[kHandler].onUpgrade(statusCode, headers, socket) } - const onFinished = function (err) { - if (finished) { - return - } - finished = true + onComplete (trailers) { + this.onFinally() - assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + assert(!this.aborted) - socket - .off('drain', onDrain) - .off('error', onFinished) + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } - body - .removeListener('data', onData) - .removeListener('end', onFinished) - .removeListener('error', onFinished) - .removeListener('close', onAbort) + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
+ this.onError(err) + } + } - if (!err) { - try { - writer.end() - } catch (er) { - err = er - } + onError (error) { + this.onFinally() + + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) } - writer.destroy(err) + if (this.aborted) { + return + } + this.aborted = true - if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { - util.destroy(body, err) - } else { - util.destroy(body) + return this[kHandler].onError(error) + } + + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } + + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null } } - body - .on('data', onData) - .on('end', onFinished) - .on('error', onFinished) - .on('close', onAbort) + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this + } - if (body.resume) { - body.resume() + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) } - socket - .on('drain', onDrain) - .on('error', onFinished) -} + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } -async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { - assert(contentLength === body.size, 'blob body must have content length') + const request = new Request(origin, opts, handler) - const isH2 = client[kHTTPConnVersion] === 'h2' - try { - if (contentLength != null && contentLength !== body.size) { - throw new RequestContentLengthMismatchError() + request.headers = {} + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') } - const buffer = Buffer.from(await body.arrayBuffer()) + return request + } - if (isH2) { - h2stream.cork() - h2stream.write(buffer) - h2stream.uncork() - } else { - socket.cork() - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - socket.write(buffer) - socket.uncork() - } + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} - request.onBodySent(buffer) - request.onRequestSent() + for (const header of rawHeaders) { + const [key, value] = header.split(': ') - if (!expectsPayload) { - socket[kReset] = true + if (value == null || value.length === 0) continue + + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value } - resume(client) - } catch (err) { - util.destroy(isH2 ? 
h2stream : socket, err) + return headers } } -async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { - assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') - - let callback = null - function onDrain () { - if (callback) { - const cb = callback - callback = null - cb() - } +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) } - const waitForDrain = () => new Promise((resolve, reject) => { - assert(callback === null) + val = val != null ? `${val}` : '' - if (socket[kError]) { - reject(socket[kError]) - } else { - callback = resolve - } - }) + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } - if (client[kHTTPConnVersion] === 'h2') { - h2stream - .on('close', onDrain) - .on('drain', onDrain) + return skipAppend ? val : `${key}: ${val}\r\n` +} - try { - // It's up to the user to somehow abort the async iterable. - for await (const chunk of body) { - if (socket[kError]) { - throw socket[kError] - } +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return + } - const res = h2stream.write(chunk) - request.onBodySent(chunk) - if (!res) { - await waitForDrain() + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? 
val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') + } else { + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) + } else { + request.headers += processHeaderValue(key, val[i]) } } - } catch (err) { - h2stream.destroy(err) - } finally { - request.onRequestSent() - h2stream.end() - h2stream - .off('close', onDrain) - .off('drain', onDrain) + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) } - - return } +} - socket - .on('close', onDrain) - .on('drain', onDrain) +module.exports = Request - const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) - try { - // It's up to the user to somehow abort the async iterable. - for await (const chunk of body) { - if (socket[kError]) { - throw socket[kError] - } - if (!writer.write(chunk)) { - await waitForDrain() - } - } +/***/ }), - writer.end() - } catch (err) { - writer.destroy(err) - } finally { - socket - .off('close', onDrain) - .off('drain', onDrain) - } +/***/ 72785: +/***/ ((module) => { + +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: Symbol('server name'), + kLocalAddress: Symbol('local address'), + kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: 
Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') } -class AsyncWriter { - constructor ({ socket, request, contentLength, client, expectsPayload, header }) { - this.socket = socket - this.request = request - this.contentLength = contentLength - this.client = client - this.bytesWritten = 0 - this.expectsPayload = expectsPayload - this.header = header - - socket[kWriting] = true - } - write (chunk) { - const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this +/***/ }), - if (socket[kError]) { - throw socket[kError] - } +/***/ 83983: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (socket.destroyed) { - return false - } +"use strict"; - const len = Buffer.byteLength(chunk) - if (!len) { - return true - } - // We should defer writing chunks. - if (contentLength !== null && bytesWritten + len > contentLength) { - if (client[kStrictContentLength]) { - throw new RequestContentLengthMismatchError() - } +const assert = __nccwpck_require__(39491) +const { kDestroyed, kBodyUsed } = __nccwpck_require__(72785) +const { IncomingMessage } = __nccwpck_require__(13685) +const stream = __nccwpck_require__(12781) +const net = __nccwpck_require__(41808) +const { InvalidArgumentError } = __nccwpck_require__(48045) +const { Blob } = __nccwpck_require__(14300) +const nodeUtil = __nccwpck_require__(73837) +const { stringify } = __nccwpck_require__(63477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(14462) - process.emitWarning(new RequestContentLengthMismatchError()) - } +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) - socket.cork() +function nop () {} - if (bytesWritten === 0) { - if (!expectsPayload) { - socket[kReset] = true - } +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} - if (contentLength === null) { - socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') - } else { - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - } - } +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} - if (contentLength === null) { - socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') - } +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" 
or "#".') + } - this.bytesWritten += len + const stringified = stringify(queryParams) - const ret = socket.write(chunk) + if (stringified) { + url += '?' + stringified + } - socket.uncork() + return url +} - request.onBodySent(chunk) +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) - if (!ret) { - if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { - // istanbul ignore else: only for jest - if (socket[kParser].timeout.refresh) { - socket[kParser].timeout.refresh() - } - } + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') } - return ret + return url } - end () { - const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this - request.onRequestSent() + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } - socket[kWriting] = false + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } - if (socket[kError]) { - throw socket[kError] + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') } - if (socket.destroyed) { - return + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') } - if (bytesWritten === 0) { - if (expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD send a Content-Length in a request message when - // no Transfer-Encoding is sent and the request method defines a meaning - // for an enclosed payload body. + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } - socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') - } else { - socket.write(`${header}\r\n`, 'latin1') - } - } else if (contentLength === null) { - socket.write('\r\n0\r\n\r\n', 'latin1') + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') } - if (contentLength !== null && bytesWritten !== contentLength) { - if (client[kStrictContentLength]) { - throw new RequestContentLengthMismatchError() - } else { - process.emitWarning(new RequestContentLengthMismatchError()) - } + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') } - if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { - // istanbul ignore else: only for jest - if (socket[kParser].timeout.refresh) { - socket[kParser].timeout.refresh() - } + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? 
url.path + : `${url.pathname || ''}${url.search || ''}` + + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) } - resume(client) + if (path && !path.startsWith('/')) { + path = `/${path}` + } + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. + url = new URL(origin + path) } - destroy (err) { - const { socket, client } = this + return url +} - socket[kWriting] = false +function parseOrigin (url) { + url = parseURL(url) - if (err) { - assert(client[kRunning] <= 1, 'pipeline should only contain this request') - util.destroy(socket, err) - } + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') } + + return url } -function errorRequest (client, request, err) { - try { - request.onError(err) - assert(request.aborted) - } catch (err) { - client.emit('error', err) +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') + + assert(idx !== -1) + return host.substring(1, idx) } -} -module.exports = Client + const idx = host.indexOf(':') + if (idx === -1) return host + return host.substring(0, idx) +} -/***/ }), +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } -/***/ 6436: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + assert.strictEqual(typeof host, 'string') -"use strict"; + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' + } + return servername +} -/* istanbul ignore file: only for Node 12 */ +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} -const { kConnected, kSize } = __nccwpck_require__(2785) +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} -class CompatWeakRef { - constructor (value) { - this.value = value - } +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} - deref () { - return this.value[kConnected] === 0 && this.value[kSize] === 0 - ? undefined - : this.value +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? 
body.size : null + } else if (isBuffer(body)) { + return body.byteLength } + + return null } -class CompatFinalizer { - constructor (finalizer) { - this.finalizer = finalizer - } +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} - register (dispatcher, key) { - if (dispatcher.on) { - dispatcher.on('disconnect', () => { - if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { - this.finalizer(key) - } - }) - } - } +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted } -module.exports = function () { - // FIXME: remove workaround when the Node bug is fixed - // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 - if (process.env.NODE_V8_COVERAGE) { - return { - WeakRef: CompatWeakRef, - FinalizationRegistry: CompatFinalizer +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) } - return { - WeakRef: global.WeakRef || CompatWeakRef, - FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + + if (stream.destroyed !== true) { + stream[kDestroyed] = true } } +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? parseInt(m[1], 10) * 1000 : null +} -/***/ }), - -/***/ 663: -/***/ ((module) => { +/** + * Retrieves a header name and returns its lowercase value. 
+ * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} -"use strict"; +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] -// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size -const maxAttributeValueSize = 1024 + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') + } + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val + } + val.push(headers[i + 1].toString('utf8')) + } + } -// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size -const maxNameValuePairSize = 4096 + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + } -module.exports = { - maxAttributeValueSize, - maxNameValuePairSize + return obj } +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 -/***/ }), - -/***/ 1724: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') -"use strict"; + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) + } + } + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } -const { parseSetCookie } = __nccwpck_require__(4408) -const { stringify, getHeadersList } = __nccwpck_require__(3121) -const { webidl } = __nccwpck_require__(1744) -const { Headers } = __nccwpck_require__(554) + return ret +} -/** - * @typedef {Object} Cookie - * @property {string} name - * @property {string} value - * @property {Date|number|undefined} expires - * @property {number|undefined} maxAge - * @property {string|undefined} domain - * @property {string|undefined} path - * @property {boolean|undefined} secure - * @property {boolean|undefined} httpOnly - * @property {'Strict'|'Lax'|'None'} sameSite - * @property {string[]} unparsed - */ +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} -/** - * @param {Headers} headers - * @returns {Record} - */ -function getCookies (headers) { - webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } - webidl.brandCheck(headers, Headers, { strict: false }) + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') + } - const cookie = headers.get('cookie') - const out = {} + if (typeof handler.onError 
!== 'function') { + throw new InvalidArgumentError('invalid onError method') + } - if (!cookie) { - return out + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') } - for (const piece of cookie.split(';')) { - const [name, ...value] = piece.split('=') + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') + } + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') + } - out[name.trim()] = value.join('=') - } + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') + } - return out + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') + } + } } -/** - * @param {Headers} headers - * @param {string} name - * @param {{ path?: string, domain?: string }|undefined} attributes - * @returns {void} - */ -function deleteCookie (headers, name, attributes) { - webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) - - webidl.brandCheck(headers, Headers, { strict: false }) +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} - name = webidl.converters.DOMString(name) - attributes = webidl.converters.DeleteCookieAttributes(attributes) +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} - // Matches behavior of - // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 - setCookie(headers, { - name, - value: '', - expires: new Date(0), - ...attributes - }) +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) } -/** - * @param {Headers} headers - * @returns {Cookie[]} - */ -function getSetCookies (headers) { - webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead + } +} - webidl.brandCheck(headers, Headers, { strict: false }) +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } +} - const cookies = getHeadersList(headers).cookies +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(35356).ReadableStream) + } - if (!cookies) { - return [] + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) } - // In older versions of undici, cookies is a list of name:value. - return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? 
pair[1] : pair)) + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) } -/** - * @param {Headers} headers - * @param {Cookie} cookie - * @returns {void} - */ -function setCookie (headers, cookie) { - webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) +// The chunk should be a FormData instance and contains +// all the required methods. +function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} - webidl.brandCheck(headers, Headers, { strict: false }) +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } + } +} - cookie = webidl.converters.Cookie(cookie) +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) + } + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) +} - const str = stringify(cookie) +const hasToWellFormed = !!String.prototype.toWellFormed - if (str) { - headers.append('Set-Cookie', stringify(cookie)) +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) } + + return `${val}` } -webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'path', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'domain', - defaultValue: null - } -]) +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } -webidl.converters.Cookie = webidl.dictionaryConverter([ - { - converter: webidl.converters.DOMString, - key: 'name' - }, - { - converter: webidl.converters.DOMString, - key: 'value' - }, - { - converter: webidl.nullableConverter((value) => { - if (typeof value === 'number') { - return webidl.converters['unsigned long long'](value) + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? 
parseInt(m[3]) : null } + : null +} - return new Date(value) - }), - key: 'expires', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters['long long']), - key: 'maxAge', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'domain', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'path', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.boolean), - key: 'secure', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.boolean), - key: 'httpOnly', - defaultValue: null - }, - { - converter: webidl.converters.USVString, - key: 'sameSite', - allowedValues: ['Strict', 'Lax', 'None'] - }, - { - converter: webidl.sequenceConverter(webidl.converters.DOMString), - key: 'unparsed', - defaultValue: [] - } -]) +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true module.exports = { - getCookies, - deleteCookie, - getSetCookies, - setCookie + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + headerNameToString, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } /***/ }), -/***/ 4408: +/***/ 74839: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(663) -const { isCTLExcludingHtab } = __nccwpck_require__(3121) -const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685) -const assert = __nccwpck_require__(9491) - -/** - * @description Parses the field-value attributes of a set-cookie header string. - * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 - * @param {string} header - * @returns if the header is invalid, null will be returned - */ -function parseSetCookie (header) { - // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F - // character (CTL characters excluding HTAB): Abort these steps and - // ignore the set-cookie-string entirely. - if (isCTLExcludingHtab(header)) { - return null - } - - let nameValuePair = '' - let unparsedAttributes = '' - let name = '' - let value = '' - - // 2. If the set-cookie-string contains a %x3B (";") character: - if (header.includes(';')) { - // 1. The name-value-pair string consists of the characters up to, - // but not including, the first %x3B (";"), and the unparsed- - // attributes consist of the remainder of the set-cookie-string - // (including the %x3B (";") in question). 
- const position = { position: 0 } +const Dispatcher = __nccwpck_require__(60412) +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = __nccwpck_require__(48045) +const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(72785) - nameValuePair = collectASequenceOfCodePointsFast(';', header, position) - unparsedAttributes = header.slice(position.position) - } else { - // Otherwise: +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') - // 1. The name-value-pair string consists of all the characters - // contained in the set-cookie-string, and the unparsed- - // attributes is the empty string. - nameValuePair = header - } +class DispatcherBase extends Dispatcher { + constructor () { + super() - // 3. If the name-value-pair string lacks a %x3D ("=") character, then - // the name string is empty, and the value string is the value of - // name-value-pair. - if (!nameValuePair.includes('=')) { - value = nameValuePair - } else { - // Otherwise, the name string consists of the characters up to, but - // not including, the first %x3D ("=") character, and the (possibly - // empty) value string consists of the characters after the first - // %x3D ("=") character. - const position = { position: 0 } - name = collectASequenceOfCodePointsFast( - '=', - nameValuePair, - position - ) - value = nameValuePair.slice(position.position + 1) + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] } - // 4. Remove any leading or trailing WSP characters from the name - // string and the value string. - name = name.trim() - value = value.trim() - - // 5. If the sum of the lengths of the name string and the value string - // is more than 4096 octets, abort these steps and ignore the set- - // cookie-string entirely. - if (name.length + value.length > maxNameValuePairSize) { - return null + get destroyed () { + return this[kDestroyed] } - // 6. The cookie-name is the name string, and the cookie-value is the - // value string. - return { - name, value, ...parseUnparsedAttributes(unparsedAttributes) + get closed () { + return this[kClosed] } -} -/** - * Parses the remaining attributes of a set-cookie header - * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 - * @param {string} unparsedAttributes - * @param {[Object.]={}} cookieAttributeList - */ -function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { - // 1. If the unparsed-attributes string is empty, skip the rest of - // these steps. - if (unparsedAttributes.length === 0) { - return cookieAttributeList + get interceptors () { + return this[kInterceptors] } - // 2. Discard the first character of the unparsed-attributes (which - // will be a %x3B (";") character). - assert(unparsedAttributes[0] === ';') - unparsedAttributes = unparsedAttributes.slice(1) - - let cookieAv = '' - - // 3. If the remaining unparsed-attributes contains a %x3B (";") - // character: - if (unparsedAttributes.includes(';')) { - // 1. Consume the characters of the unparsed-attributes up to, but - // not including, the first %x3B (";") character. 
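// [Editorial sketch: not part of the bundled diff] The set-cookie parsing above
// repeatedly splits on the first ';' and then on the first '=', roughly:
const exampleHeader = 'a=b; Max-Age=60; Secure'
const examplePair = exampleHeader.slice(0, exampleHeader.indexOf(';'))       // 'a=b'
const exampleName = examplePair.slice(0, examplePair.indexOf('=')).trim()    // 'a'
const exampleValue = examplePair.slice(examplePair.indexOf('=') + 1).trim()  // 'b'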
- cookieAv = collectASequenceOfCodePointsFast( - ';', - unparsedAttributes, - { position: 0 } - ) - unparsedAttributes = unparsedAttributes.slice(cookieAv.length) - } else { - // Otherwise: + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } + } - // 1. Consume the remainder of the unparsed-attributes. - cookieAv = unparsedAttributes - unparsedAttributes = '' + this[kInterceptors] = newInterceptors } - // Let the cookie-av string be the characters consumed in this step. + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } - let attributeName = '' - let attributeValue = '' + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } - // 4. If the cookie-av string contains a %x3D ("=") character: - if (cookieAv.includes('=')) { - // 1. The (possibly empty) attribute-name string consists of the - // characters up to, but not including, the first %x3D ("=") - // character, and the (possibly empty) attribute-value string - // consists of the characters after the first %x3D ("=") - // character. - const position = { position: 0 } + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } - attributeName = collectASequenceOfCodePointsFast( - '=', - cookieAv, - position - ) - attributeValue = cookieAv.slice(position.position + 1) - } else { - // Otherwise: + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } - // 1. The attribute-name string consists of the entire cookie-av - // string, and the attribute-value string is empty. - attributeName = cookieAv - } + this[kClosed] = true + this[kOnClosed].push(callback) - // 5. Remove any leading or trailing WSP characters from the attribute- - // name string and the attribute-value string. - attributeName = attributeName.trim() - attributeValue = attributeValue.trim() + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } - // 6. If the attribute-value is longer than 1024 octets, ignore the - // cookie-av string and return to Step 1 of this algorithm. - if (attributeValue.length > maxAttributeValueSize) { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) } - // 7. Process the attribute-name and attribute-value according to the - // requirements in the following subsections. (Notice that - // attributes with unrecognized attribute-names are ignored.) - const attributeNameLowercase = attributeName.toLowerCase() - - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 - // If the attribute-name case-insensitively matches the string - // "Expires", the user agent MUST process the cookie-av as follows. - if (attributeNameLowercase === 'expires') { - // 1. Let the expiry-time be the result of parsing the attribute-value - // as cookie-date (see Section 5.1.1). - const expiryTime = new Date(attributeValue) - - // 2. 
If the attribute-value failed to parse as a cookie date, ignore - // the cookie-av. - - cookieAttributeList.expires = expiryTime - } else if (attributeNameLowercase === 'max-age') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 - // If the attribute-name case-insensitively matches the string "Max- - // Age", the user agent MUST process the cookie-av as follows. - - // 1. If the first character of the attribute-value is not a DIGIT or a - // "-" character, ignore the cookie-av. - const charCode = attributeValue.charCodeAt(0) + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } - if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) } - // 2. If the remainder of attribute-value contains a non-DIGIT - // character, ignore the cookie-av. - if (!/^\d+$/.test(attributeValue)) { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') } - // 3. Let delta-seconds be the attribute-value converted to an integer. - const deltaSeconds = Number(attributeValue) + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } - // 4. Let cookie-age-limit be the maximum age of the cookie (which - // SHOULD be 400 days or less, see Section 4.1.2.2). + if (!err) { + err = new ClientDestroyedError() + } - // 5. Set delta-seconds to the smaller of its present value and cookie- - // age-limit. - // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) - // 6. If delta-seconds is less than or equal to zero (0), let expiry- - // time be the earliest representable date and time. Otherwise, let - // the expiry-time be the current date and time plus delta-seconds - // seconds. - // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } - // 7. Append an attribute to the cookie-attribute-list with an - // attribute-name of Max-Age and an attribute-value of expiry-time. - cookieAttributeList.maxAge = deltaSeconds - } else if (attributeNameLowercase === 'domain') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 - // If the attribute-name case-insensitively matches the string "Domain", - // the user agent MUST process the cookie-av as follows. + // Should not error. + this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } - // 1. Let cookie-domain be the attribute-value. - let cookieDomain = attributeValue + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } - // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be - // cookie-domain without its leading %x2E ("."). 
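// [Editorial sketch: not part of the bundled diff] The Domain handling described
// above drops a single leading '.' and lowercases the remainder:
let exampleDomain = '.Example.COM'
if (exampleDomain[0] === '.') exampleDomain = exampleDomain.slice(1)
exampleDomain = exampleDomain.toLowerCase() // 'example.com'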
- if (cookieDomain[0] === '.') { - cookieDomain = cookieDomain.slice(1) + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } - // 3. Convert the cookie-domain to lower case. - cookieDomain = cookieDomain.toLowerCase() + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } - // 4. Append an attribute to the cookie-attribute-list with an - // attribute-name of Domain and an attribute-value of cookie-domain. - cookieAttributeList.domain = cookieDomain - } else if (attributeNameLowercase === 'path') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 - // If the attribute-name case-insensitively matches the string "Path", - // the user agent MUST process the cookie-av as follows. + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } - // 1. If the attribute-value is empty or if the first character of the - // attribute-value is not %x2F ("/"): - let cookiePath = '' - if (attributeValue.length === 0 || attributeValue[0] !== '/') { - // 1. Let cookie-path be the default-path. - cookiePath = '/' - } else { - // Otherwise: + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } - // 1. Let cookie-path be the attribute-value. - cookiePath = attributeValue + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + handler.onError(err) + + return false } + } +} - // 2. Append an attribute to the cookie-attribute-list with an - // attribute-name of Path and an attribute-value of cookie-path. - cookieAttributeList.path = cookiePath - } else if (attributeNameLowercase === 'secure') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 - // If the attribute-name case-insensitively matches the string "Secure", - // the user agent MUST append an attribute to the cookie-attribute-list - // with an attribute-name of Secure and an empty attribute-value. +module.exports = DispatcherBase - cookieAttributeList.secure = true - } else if (attributeNameLowercase === 'httponly') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 - // If the attribute-name case-insensitively matches the string - // "HttpOnly", the user agent MUST append an attribute to the cookie- - // attribute-list with an attribute-name of HttpOnly and an empty - // attribute-value. - cookieAttributeList.httpOnly = true - } else if (attributeNameLowercase === 'samesite') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 - // If the attribute-name case-insensitively matches the string - // "SameSite", the user agent MUST process the cookie-av as follows: +/***/ }), - // 1. Let enforcement be "Default". - let enforcement = 'Default' +/***/ 60412: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const attributeValueLowercase = attributeValue.toLowerCase() - // 2. If cookie-av's attribute-value is a case-insensitive match for - // "None", set enforcement to "None". 
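// [Editorial sketch: not part of the bundled diff] SameSite enforcement above is a
// case-insensitive substring check with a 'Default' fallback:
const exampleAv = 'LaX'.toLowerCase()
let exampleEnforcement = 'Default'
if (exampleAv.includes('none')) exampleEnforcement = 'None'
if (exampleAv.includes('strict')) exampleEnforcement = 'Strict'
if (exampleAv.includes('lax')) exampleEnforcement = 'Lax' // 'Lax'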
- if (attributeValueLowercase.includes('none')) { - enforcement = 'None' - } +"use strict"; - // 3. If cookie-av's attribute-value is a case-insensitive match for - // "Strict", set enforcement to "Strict". - if (attributeValueLowercase.includes('strict')) { - enforcement = 'Strict' - } - // 4. If cookie-av's attribute-value is a case-insensitive match for - // "Lax", set enforcement to "Lax". - if (attributeValueLowercase.includes('lax')) { - enforcement = 'Lax' - } +const EventEmitter = __nccwpck_require__(82361) - // 5. Append an attribute to the cookie-attribute-list with an - // attribute-name of "SameSite" and an attribute-value of - // enforcement. - cookieAttributeList.sameSite = enforcement - } else { - cookieAttributeList.unparsed ??= [] +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } - cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + close () { + throw new Error('not implemented') } - // 8. Return to Step 1 of this algorithm. - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + destroy () { + throw new Error('not implemented') + } } -module.exports = { - parseSetCookie, - parseUnparsedAttributes -} +module.exports = Dispatcher /***/ }), -/***/ 3121: +/***/ 41472: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const assert = __nccwpck_require__(9491) -const { kHeadersList } = __nccwpck_require__(2785) +const Busboy = __nccwpck_require__(33438) +const util = __nccwpck_require__(83983) +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = __nccwpck_require__(52538) +const { FormData } = __nccwpck_require__(72015) +const { kState } = __nccwpck_require__(15861) +const { webidl } = __nccwpck_require__(21744) +const { DOMException, structuredClone } = __nccwpck_require__(41037) +const { Blob, File: NativeFile } = __nccwpck_require__(14300) +const { kBodyUsed } = __nccwpck_require__(72785) +const assert = __nccwpck_require__(39491) +const { isErrored } = __nccwpck_require__(83983) +const { isUint8Array, isArrayBuffer } = __nccwpck_require__(29830) +const { File: UndiciFile } = __nccwpck_require__(78511) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -function isCTLExcludingHtab (value) { - if (value.length === 0) { - return false - } +let ReadableStream = globalThis.ReadableStream - for (const char of value) { - const code = char.charCodeAt(0) +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() - if ( - (code >= 0x00 || code <= 0x08) || - (code >= 0x0A || code <= 0x1F) || - code === 0x7F - ) { - return false - } +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(35356).ReadableStream) } -} -/** - CHAR = - token = 1* - separators = "(" | ")" | "<" | ">" | "@" - | "," | ";" | ":" | "\" | <"> - | "/" | "[" | "]" | "?" | "=" - | "{" | "}" | SP | HT - * @param {string} name - */ -function validateCookieName (name) { - for (const char of name) { - const code = char.charCodeAt(0) + // 1. Let stream be null. 
+ let stream = null - if ( - (code <= 0x20 || code > 0x7F) || - char === '(' || - char === ')' || - char === '>' || - char === '<' || - char === '@' || - char === ',' || - char === ';' || - char === ':' || - char === '\\' || - char === '"' || - char === '/' || - char === '[' || - char === ']' || - char === '?' || - char === '=' || - char === '{' || - char === '}' - ) { - throw new Error('Invalid cookie name') - } + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) } -} -/** - cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) - cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E - ; US-ASCII characters excluding CTLs, - ; whitespace DQUOTE, comma, semicolon, - ; and backslash - * @param {string} value - */ -function validateCookieValue (value) { - for (const char of value) { - const code = char.charCodeAt(0) + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) - if ( - code < 0x21 || // exclude CTLs (0-31) - code === 0x22 || - code === 0x2C || - code === 0x3B || - code === 0x5C || - code > 0x7E // non-ascii - ) { - throw new Error('Invalid header value') - } - } -} + // 6. Let action be null. + let action = null -/** - * path-value = - * @param {string} path - */ -function validateCookiePath (path) { - for (const char of path) { - const code = char.charCodeAt(0) + // 7. Let source be null. + let source = null - if (code < 0x21 || char === ';') { - throw new Error('Invalid cookie path') - } - } -} + // 8. Let length be null. + let length = null -/** - * I have no idea why these values aren't allowed to be honest, - * but Deno tests these. - Khafra - * @param {string} domain - */ -function validateCookieDomain (domain) { - if ( - domain.startsWith('-') || - domain.endsWith('.') || - domain.endsWith('-') - ) { - throw new Error('Invalid cookie domain') - } -} + // 9. Let type be null. + let type = null -/** - * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 - * @param {number|Date} date - IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT - ; fixed length/zone/capitalization subset of the format - ; see Section 3.3 of [RFC5322] + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object - day-name = %x4D.6F.6E ; "Mon", case-sensitive - / %x54.75.65 ; "Tue", case-sensitive - / %x57.65.64 ; "Wed", case-sensitive - / %x54.68.75 ; "Thu", case-sensitive - / %x46.72.69 ; "Fri", case-sensitive - / %x53.61.74 ; "Sat", case-sensitive - / %x53.75.6E ; "Sun", case-sensitive - date1 = day SP month SP year - ; e.g., 02 Jun 1982 + // Set type to `text/plain;charset=UTF-8`. 
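// [Editorial sketch: not part of the bundled diff] For a string body, source stays a
// string and is only UTF-8 encoded when the pull() above enqueues it:
const exampleEncoded = new TextEncoder().encode('hi') // Uint8Array(2) [ 104, 105 ]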
+ type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams - day = 2DIGIT - month = %x4A.61.6E ; "Jan", case-sensitive - / %x46.65.62 ; "Feb", case-sensitive - / %x4D.61.72 ; "Mar", case-sensitive - / %x41.70.72 ; "Apr", case-sensitive - / %x4D.61.79 ; "May", case-sensitive - / %x4A.75.6E ; "Jun", case-sensitive - / %x4A.75.6C ; "Jul", case-sensitive - / %x41.75.67 ; "Aug", case-sensitive - / %x53.65.70 ; "Sep", case-sensitive - / %x4F.63.74 ; "Oct", case-sensitive - / %x4E.6F.76 ; "Nov", case-sensitive - / %x44.65.63 ; "Dec", case-sensitive - year = 4DIGIT + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 - GMT = %x47.4D.54 ; "GMT", case-sensitive + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. + source = object.toString() - time-of-day = hour ":" minute ":" second - ; 00:00:00 - 23:59:60 (leap second) + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer - hour = 2DIGIT - minute = 2DIGIT - second = 2DIGIT - */ -function toIMFDate (date) { - if (typeof date === 'number') { - date = new Date(date) - } + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView - const days = [ - 'Sun', 'Mon', 'Tue', 'Wed', - 'Thu', 'Fri', 'Sat' - ] + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` - const months = [ - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' - ] + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') - const dayName = days[date.getUTCDay()] - const day = date.getUTCDate().toString().padStart(2, '0') - const month = months[date.getUTCMonth()] - const year = date.getUTCFullYear() - const hour = date.getUTCHours().toString().padStart(2, '0') - const minute = date.getUTCMinutes().toString().padStart(2, '0') - const second = date.getUTCSeconds().toString().padStart(2, '0') + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. 
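// [Editorial sketch: not part of the bundled diff] The multipart boundary built above
// is '----formdata-undici-0' plus an 11-digit, zero-padded random number:
const exampleBoundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
// e.g. '----formdata-undici-003941072865'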
- return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` -} + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false -/** - max-age-av = "Max-Age=" non-zero-digit *DIGIT - ; In practice, both expires-av and max-age-av - ; are limited to dates representable by the - ; user agent. - * @param {number} maxAge - */ -function validateCookieMaxAge (maxAge) { - if (maxAge < 0) { - throw new Error('Invalid cookie max-age') - } -} + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } -/** - * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 - * @param {import('./index').Cookie} cookie - */ -function stringify (cookie) { - if (cookie.name.length === 0) { - return null - } + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } - validateCookieName(cookie.name) - validateCookieValue(cookie.value) + // Set source to object. + source = object - const out = [`${cookie.name}=${cookie.value}`] + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 - if (cookie.name.startsWith('__Secure-')) { - cookie.secure = true - } + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob - if (cookie.name.startsWith('__Host-')) { - cookie.secure = true - cookie.domain = null - cookie.path = '/' + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) } - if (cookie.secure) { - out.push('Secure') + // 11. If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) } - if (cookie.httpOnly) { - out.push('HttpOnly') + // 12. 
If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) } - if (typeof cookie.maxAge === 'number') { - validateCookieMaxAge(cookie.maxAge) - out.push(`Max-Age=${cookie.maxAge}`) - } + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } - if (cookie.domain) { - validateCookieDomain(cookie.domain) - out.push(`Domain=${cookie.domain}`) - } + // 14. Return (body, type). + return [body, type] +} - if (cookie.path) { - validateCookiePath(cookie.path) - out.push(`Path=${cookie.path}`) +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = (__nccwpck_require__(35356).ReadableStream) } - if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { - out.push(`Expires=${toIMFDate(cookie.expires)}`) - } + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: - if (cookie.sameSite) { - out.push(`SameSite=${cookie.sameSite}`) + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') } - for (const part of cookie.unparsed) { - if (!part.includes('=')) { - throw new Error('Invalid unparsed') - } + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} - const [key, ...value] = part.split('=') +function cloneBody (body) { + // To clone a body body, run these steps: - out.push(`${key.trim()}=${value.join('=')}`) - } + // https://fetch.spec.whatwg.org/#concept-body-clone - return out.join('; ') -} + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. + const [, finalClone] = out2Clone.tee() -let kHeadersListNode + // 2. Set body’s stream to out1. + body.stream = out1 -function getHeadersList (headers) { - if (headers[kHeadersList]) { - return headers[kHeadersList] + // 3. Return a body whose stream is out2 and other members are copied from body. 
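// [Editorial sketch: not part of the bundled diff] cloneBody above relies on
// ReadableStream.tee(): both branches observe the same chunks, so one branch can
// keep serving the original body while the other backs the clone.
const { ReadableStream: ExampleReadableStream } = require('stream/web')
const exampleStream = new ExampleReadableStream({ start (c) { c.enqueue('chunk'); c.close() } })
const [exampleBranch1, exampleBranch2] = exampleStream.tee()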
+ return { + stream: finalClone, + length: body.length, + source: body.source } +} - if (!kHeadersListNode) { - kHeadersListNode = Object.getOwnPropertySymbols(headers).find( - (symbol) => symbol.description === 'headers list' - ) +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream - assert(kHeadersListNode, 'Headers cannot be parsed') - } + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } - const headersList = headers[kHeadersListNode] - assert(headersList) + if (stream.locked) { + throw new TypeError('The stream is locked.') + } - return headersList -} + // Compat. + stream[kBodyUsed] = true -module.exports = { - isCTLExcludingHtab, - stringify, - getHeadersList + yield * stream + } + } } +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} -/***/ }), - -/***/ 2067: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. + return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) -const net = __nccwpck_require__(1808) -const assert = __nccwpck_require__(9491) -const util = __nccwpck_require__(3983) -const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8045) + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } -let tls // include tls conditionally since it is not always available + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, -// TODO: session re-use does not wait for the first -// connection to resolve the session and might therefore -// resolve the same servername multiple times even when -// re-use is enabled. + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, -let SessionCache -// FIXME: remove workaround when the Node bug is fixed -// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 -if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { - SessionCache = class WeakSessionCache { - constructor (maxCachedSessions) { - this._maxCachedSessions = maxCachedSessions - this._sessionCache = new Map() - this._sessionRegistry = new global.FinalizationRegistry((key) => { - if (this._sessionCache.size < this._maxCachedSessions) { - return - } + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, - const ref = this._sessionCache.get(key) - if (ref !== undefined && ref.deref() === undefined) { - this._sessionCache.delete(key) - } - }) - } + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. 
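// [Editorial sketch: not part of the bundled diff] "Parse JSON from bytes" in the
// json() steps above amounts to a UTF-8 decode followed by JSON.parse:
const exampleBytes = new TextEncoder().encode('{"ok":true}')
const exampleParsed = JSON.parse(new TextDecoder().decode(exampleBytes)) // { ok: true }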
+ return specConsumeBody(this, parseJSONFromBytes, instance) + }, - get (sessionKey) { - const ref = this._sessionCache.get(sessionKey) - return ref ? ref.deref() : null - } + async formData () { + webidl.brandCheck(this, instance) - set (sessionKey, session) { - if (this._maxCachedSessions === 0) { - return - } + throwIfAborted(this[kState]) - this._sessionCache.set(sessionKey, new WeakRef(session)) - this._sessionRegistry.register(session, sessionKey) - } - } -} else { - SessionCache = class SimpleSessionCache { - constructor (maxCachedSessions) { - this._maxCachedSessions = maxCachedSessions - this._sessionCache = new Map() - } + const contentType = this.headers.get('Content-Type') - get (sessionKey) { - return this._sessionCache.get(sessionKey) - } + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value - set (sessionKey, session) { - if (this._maxCachedSessions === 0) { - return - } + const responseFormData = new FormData() - if (this._sessionCache.size >= this._maxCachedSessions) { - // remove the oldest session - const { value: oldestKey } = this._sessionCache.keys().next() - this._sessionCache.delete(oldestKey) - } + let busboy - this._sessionCache.set(sessionKey, session) - } - } -} + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } -function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { - if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { - throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') - } + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] - const options = { path: socketPath, ...opts } - const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) - timeout = timeout == null ? 10e3 : timeout - allowH2 = allowH2 != null ? allowH2 : false - return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { - let socket - if (protocol === 'https:') { - if (!tls) { - tls = __nccwpck_require__(4404) - } - servername = servername || options.servername || util.getServerName(host) || null + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' - const sessionKey = servername || hostname - const session = sessionCache.get(sessionKey) || null + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') - assert(sessionKey) + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) - socket = tls.connect({ - highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... - ...options, - servername, - session, - localAddress, - // TODO(HTTP/2): Add support for h2c - ALPNProtocols: allowH2 ? 
['http/1.1', 'h2'] : ['http/1.1'], - socket: httpSocket, // upgrade socket connection - port: port || 443, - host: hostname - }) + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) - socket - .on('session', function (session) { - // TODO (fix): Can a session become invalid once established? Don't think so? - sessionCache.set(sessionKey, session) + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) }) - } else { - assert(!httpSocket, 'httpSocket can only be sent on TLS update') - socket = net.connect({ - highWaterMark: 64 * 1024, // Same as nodejs fs streams. - ...options, - localAddress, - port: port || 80, - host: hostname - }) - } - // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket - if (options.keepAlive == null || options.keepAlive) { - const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay - socket.setKeepAlive(true, keepAliveInitialDelay) - } + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve - const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: - socket - .setNoDelay(true) - .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { - cancelTimeout() + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. + // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) - if (callback) { - const cb = callback - callback = null - cb(null, this) + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) } - }) - .on('error', function (err) { - cancelTimeout() - if (callback) { - const cb = callback - callback = null - cb(err) + // 3. Return a new FormData object whose entries are entries. 
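// [Editorial sketch: not part of the bundled diff] The urlencoded branch above ends by
// copying URLSearchParams entries into a FormData instance:
const exampleEntries = new URLSearchParams('a=1&b=two')
// iterating exampleEntries yields ['a', '1'] and then ['b', 'two']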
+ const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) } - }) - - return socket - } -} - -function setupTimeout (onConnectTimeout, timeout) { - if (!timeout) { - return () => {} - } - - let s1 = null - let s2 = null - const timeoutId = setTimeout(() => { - // setImmediate is added to make sure that we priotorise socket error events over timeouts - s1 = setImmediate(() => { - if (process.platform === 'win32') { - // Windows needs an extra setImmediate probably due to implementation differences in the socket logic - s2 = setImmediate(() => onConnectTimeout()) + return formData } else { - onConnectTimeout() - } - }) - }, timeout) - return () => { - clearTimeout(timeoutId) - clearImmediate(s1) - clearImmediate(s2) - } -} - -function onConnectTimeout (socket) { - util.destroy(socket, new ConnectTimeoutError()) -} - -module.exports = buildConnector - - -/***/ }), - -/***/ 8045: -/***/ ((module) => { - -"use strict"; - + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() -class UndiciError extends Error { - constructor (message) { - super(message) - this.name = 'UndiciError' - this.code = 'UND_ERR' - } -} + throwIfAborted(this[kState]) -class ConnectTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ConnectTimeoutError) - this.name = 'ConnectTimeoutError' - this.message = message || 'Connect Timeout Error' - this.code = 'UND_ERR_CONNECT_TIMEOUT' + // Otherwise, throw a TypeError. + throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' + }) + } + } } -} -class HeadersTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, HeadersTimeoutError) - this.name = 'HeadersTimeoutError' - this.message = message || 'Headers Timeout Error' - this.code = 'UND_ERR_HEADERS_TIMEOUT' - } + return methods } -class HeadersOverflowError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, HeadersOverflowError) - this.name = 'HeadersOverflowError' - this.message = message || 'Headers Overflow Error' - this.code = 'UND_ERR_HEADERS_OVERFLOW' - } +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) } -class BodyTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, BodyTimeoutError) - this.name = 'BodyTimeoutError' - this.message = message || 'Body Timeout Error' - this.code = 'UND_ERR_BODY_TIMEOUT' - } -} +/** + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance + */ +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) -class ResponseStatusCodeError extends UndiciError { - constructor (message, statusCode, headers, body) { - super(message) - Error.captureStackTrace(this, ResponseStatusCodeError) - this.name = 'ResponseStatusCodeError' - this.message = message || 'Response Status Code Error' - this.code = 'UND_ERR_RESPONSE_STATUS_CODE' - this.body = body - this.status = statusCode - this.statusCode = statusCode - this.headers = headers - } -} + throwIfAborted(object[kState]) -class InvalidArgumentError extends UndiciError { - 
constructor (message) { - super(message) - Error.captureStackTrace(this, InvalidArgumentError) - this.name = 'InvalidArgumentError' - this.message = message || 'Invalid Argument Error' - this.code = 'UND_ERR_INVALID_ARG' + // 1. If object is unusable, then return a promise rejected + // with a TypeError. + if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') } -} -class InvalidReturnValueError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, InvalidReturnValueError) - this.name = 'InvalidReturnValueError' - this.message = message || 'Invalid Return Value Error' - this.code = 'UND_ERR_INVALID_RETURN_VALUE' - } -} + // 2. Let promise be a new promise. + const promise = createDeferredPromise() -class RequestAbortedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, RequestAbortedError) - this.name = 'AbortError' - this.message = message || 'Request aborted' - this.code = 'UND_ERR_ABORTED' - } -} + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) -class InformationalError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, InformationalError) - this.name = 'InformationalError' - this.message = message || 'Request information' - this.code = 'UND_ERR_INFO' + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } } -} -class RequestContentLengthMismatchError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, RequestContentLengthMismatchError) - this.name = 'RequestContentLengthMismatchError' - this.message = message || 'Request body length does not match content-length header' - this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise } -} -class ResponseContentLengthMismatchError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ResponseContentLengthMismatchError) - this.name = 'ResponseContentLengthMismatchError' - this.message = message || 'Response body length does not match content-length header' - this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' - } -} + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) -class ClientDestroyedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ClientDestroyedError) - this.name = 'ClientDestroyedError' - this.message = message || 'The client is destroyed' - this.code = 'UND_ERR_DESTROYED' - } + // 7. Return promise. 
+ return promise.promise } -class ClientClosedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ClientClosedError) - this.name = 'ClientClosedError' - this.message = message || 'The client is closed' - this.code = 'UND_ERR_CLOSED' - } +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) } -class SocketError extends UndiciError { - constructor (message, socket) { - super(message) - Error.captureStackTrace(this, SocketError) - this.name = 'SocketError' - this.message = message || 'Socket error' - this.code = 'UND_ERR_SOCKET' - this.socket = socket +/** + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer + */ +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' } -} -class NotSupportedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, NotSupportedError) - this.name = 'NotSupportedError' - this.message = message || 'Not supported error' - this.code = 'UND_ERR_NOT_SUPPORTED' - } -} + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. -class BalancedPoolMissingUpstreamError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, NotSupportedError) - this.name = 'MissingUpstreamError' - this.message = message || 'No upstream has been added to the BalancedPool' - this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + // 2. If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) } -} -class HTTPParserError extends Error { - constructor (message, code, data) { - super(message) - Error.captureStackTrace(this, HTTPParserError) - this.name = 'HTTPParserError' - this.code = code ? `HPE_${code}` : undefined - this.data = data ? data.toString() : undefined - } + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) + + // 4. Return output. 
+ return output } -class ResponseExceededMaxSizeError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ResponseExceededMaxSizeError) - this.name = 'ResponseExceededMaxSizeError' - this.message = message || 'Response content exceeded max size' - this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' - } +/** + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes + */ +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) } -class RequestRetryError extends UndiciError { - constructor (message, code, { headers, data }) { - super(message) - Error.captureStackTrace(this, RequestRetryError) - this.name = 'RequestRetryError' - this.message = message || 'Request retry error' - this.code = 'UND_ERR_REQ_RETRY' - this.statusCode = code - this.data = data - this.headers = headers +/** + * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object + */ +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' } + + return parseMIMEType(contentType) } module.exports = { - HTTPParserError, - UndiciError, - HeadersTimeoutError, - HeadersOverflowError, - BodyTimeoutError, - RequestContentLengthMismatchError, - ConnectTimeoutError, - ResponseStatusCodeError, - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError, - ClientDestroyedError, - ClientClosedError, - InformationalError, - SocketError, - NotSupportedError, - ResponseContentLengthMismatchError, - BalancedPoolMissingUpstreamError, - ResponseExceededMaxSizeError, - RequestRetryError + extractBody, + safelyExtractBody, + cloneBody, + mixinBody } /***/ }), -/***/ 2905: +/***/ 41037: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { - InvalidArgumentError, - NotSupportedError -} = __nccwpck_require__(8045) -const assert = __nccwpck_require__(9491) -const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785) -const util = __nccwpck_require__(3983) - -// tokenRegExp and headerCharRegex have been lifted from -// https://github.com/nodejs/node/blob/main/lib/_http_common.js - -/** - * Verifies that the given val is a valid HTTP token - * per the rules defined in RFC 7230 - * See https://tools.ietf.org/html/rfc7230#section-3.2.6 - */ -const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ - -/** - * Matches if val contains an invalid field-vchar - * field-value = *( field-content / obs-fold ) - * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] - * field-vchar = VCHAR / obs-text - */ -const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - -// Verifies that a given path is valid does not contain control chars \x00 to \x20 -const invalidPathRegex = /[^\u0021-\u00ff]/ - -const kHandler = Symbol('handler') +const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(71267) -const channels = {} +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) -let extractBody +const nullBodyStatus = [101, 204, 205, 304] -try { - const diagnosticsChannel = __nccwpck_require__(7643) - channels.create = diagnosticsChannel.channel('undici:request:create') - channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') - channels.headers = 
diagnosticsChannel.channel('undici:request:headers') - channels.trailers = diagnosticsChannel.channel('undici:request:trailers') - channels.error = diagnosticsChannel.channel('undici:request:error') -} catch { - channels.create = { hasSubscribers: false } - channels.bodySent = { hasSubscribers: false } - channels.headers = { hasSubscribers: false } - channels.trailers = { hasSubscribers: false } - channels.error = { hasSubscribers: false } -} +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) -class Request { - constructor (origin, { - path, - method, - body, - headers, - query, - idempotent, - blocking, - upgrade, - headersTimeout, - bodyTimeout, - reset, - throwOnError, - expectContinue - }, handler) { - if (typeof path !== 'string') { - throw new InvalidArgumentError('path must be a string') - } else if ( - path[0] !== '/' && - !(path.startsWith('http://') || path.startsWith('https://')) && - method !== 'CONNECT' - ) { - throw new InvalidArgumentError('path must be an absolute URL or start with a slash') - } else if (invalidPathRegex.exec(path) !== null) { - throw new InvalidArgumentError('invalid request path') - } +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] - if (typeof method !== 'string') { - throw new InvalidArgumentError('method must be a string') - } else if (tokenRegExp.exec(method) === null) { - throw new InvalidArgumentError('invalid request method') - } +const badPortsSet = new Set(badPorts) - if (upgrade && typeof upgrade !== 'string') { - throw new InvalidArgumentError('upgrade must be a string') - } +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) - if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { - throw new InvalidArgumentError('invalid headersTimeout') - } +const requestRedirect = ['follow', 'manual', 'error'] - if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { - throw new InvalidArgumentError('invalid bodyTimeout') - } +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) - if (reset != null && typeof reset !== 'boolean') { - throw new InvalidArgumentError('invalid reset') - } +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] - if (expectContinue != null && typeof expectContinue !== 'boolean') { - throw new InvalidArgumentError('invalid expectContinue') - } +const requestCredentials = ['omit', 'same-origin', 'include'] - this.headersTimeout = headersTimeout +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] - this.bodyTimeout = bodyTimeout +// 
https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. + 'content-length' +] - this.throwOnError = throwOnError === true +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] - this.method = method +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) - this.abort = null +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) - if (body == null) { - this.body = null - } else if (util.isStream(body)) { - this.body = body +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. + try { + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor + } +})() - const rState = this.body._readableState - if (!rState || !rState.autoDestroy) { - this.endHandler = function autoDestroy () { - util.destroy(this) - } - this.body.on('end', this.endHandler) - } +let channel - this.errorHandler = err => { - if (this.abort) { - this.abort(err) - } else { - this.error = err - } - } - this.body.on('error', this.errorHandler) - } else if (util.isBuffer(body)) { - this.body = body.byteLength ? body : null - } else if (ArrayBuffer.isView(body)) { - this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null - } else if (body instanceof ArrayBuffer) { - this.body = body.byteLength ? Buffer.from(body) : null - } else if (typeof body === 'string') { - this.body = body.length ? Buffer.from(body) : null - } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { - this.body = body - } else { - throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') } - this.completed = false - - this.aborted = false - - this.upgrade = upgrade || null - - this.path = query ? 
util.buildURL(path, query) : path + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message + } - this.origin = origin +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} - this.idempotent = idempotent == null - ? method === 'HEAD' || method === 'GET' - : idempotent - this.blocking = blocking == null ? false : blocking +/***/ }), - this.reset = reset == null ? null : reset +/***/ 685: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.host = null +const assert = __nccwpck_require__(39491) +const { atob } = __nccwpck_require__(14300) +const { isomorphicDecode } = __nccwpck_require__(52538) - this.contentLength = null +const encoder = new TextEncoder() - this.contentType = null +/** + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point + */ +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line +/** + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point + */ +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line - this.headers = '' +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') - // Only for H2 - this.expectContinue = expectContinue != null ? expectContinue : false + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) - if (Array.isArray(headers)) { - if (headers.length % 2 !== 0) { - throw new InvalidArgumentError('headers array must be even') - } - for (let i = 0; i < headers.length; i += 2) { - processHeader(this, headers[i], headers[i + 1]) - } - } else if (headers && typeof headers === 'object') { - const keys = Object.keys(headers) - for (let i = 0; i < keys.length; i++) { - const key = keys[i] - processHeader(this, key, headers[key]) - } - } else if (headers != null) { - throw new InvalidArgumentError('headers must be an object or an array') - } + // 3. Remove the leading "data:" string from input. + input = input.slice(5) - if (util.isFormDataLike(this.body)) { - if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { - throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') - } + // 4. Let position point at the start of input. + const position = { position: 0 } - if (!extractBody) { - extractBody = (__nccwpck_require__(9990).extractBody) - } + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. 
+ let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) - const [bodyStream, contentType] = extractBody(body) - if (this.contentType == null) { - this.contentType = contentType - this.headers += `content-type: ${contentType}\r\n` - } - this.body = bodyStream.stream - this.contentLength = bodyStream.length - } else if (util.isBlobLike(body) && this.contentType == null && body.type) { - this.contentType = body.type - this.headers += `content-type: ${body.type}\r\n` - } + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. + // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) - util.validateHandler(handler, method, upgrade) + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' + } - this.servername = util.getServerName(this.host) + // 8. Advance position by 1. + position.position++ - this[kHandler] = handler + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) - if (channels.create.hasSubscribers) { - channels.create.publish({ request: this }) - } - } + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) - onBodySent (chunk) { - if (this[kHandler].onBodySent) { - try { - return this[kHandler].onBodySent(chunk) - } catch (err) { - this.abort(err) - } - } - } + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) - onRequestSent () { - if (channels.bodySent.hasSubscribers) { - channels.bodySent.publish({ request: this }) - } + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) - if (this[kHandler].onRequestSent) { - try { - return this[kHandler].onRequestSent() - } catch (err) { - this.abort(err) - } + // 3. If body is failure, then return failure. + if (body === 'failure') { + return 'failure' } - } - - onConnect (abort) { - assert(!this.aborted) - assert(!this.completed) - if (this.error) { - abort(this.error) - } else { - this.abort = abort - return this[kHandler].onConnect(abort) - } - } + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) - onHeaders (statusCode, headers, resume, statusText) { - assert(!this.aborted) - assert(!this.completed) + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') - if (channels.headers.hasSubscribers) { - channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) - } + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } - try { - return this[kHandler].onHeaders(statusCode, headers, resume, statusText) - } catch (err) { - this.abort(err) - } + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType } - onData (chunk) { - assert(!this.aborted) - assert(!this.completed) + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. 
+ let mimeTypeRecord = parseMIMEType(mimeType) - try { - return this[kHandler].onData(chunk) - } catch (err) { - this.abort(err) - return false - } + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. + if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') } - onUpgrade (statusCode, headers, socket) { - assert(!this.aborted) - assert(!this.completed) + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} - return this[kHandler].onUpgrade(statusCode, headers, socket) +// https://url.spec.whatwg.org/#concept-url-serializer +/** + * @param {URL} url + * @param {boolean} excludeFragment + */ +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href } - onComplete (trailers) { - this.onFinally() + const href = url.href + const hashLength = url.hash.length - assert(!this.aborted) + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} - this.completed = true - if (channels.trailers.hasSubscribers) { - channels.trailers.publish({ request: this, trailers }) - } +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' - try { - return this[kHandler].onComplete(trailers) - } catch (err) { - // TODO (fix): This might be a bad idea? - this.onError(err) - } - } + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] - onError (error) { - this.onFinally() + // 2. Advance position by 1. + position.position++ + } - if (channels.error.hasSubscribers) { - channels.error.publish({ request: this, error }) - } + // 3. Return result. + return result +} - if (this.aborted) { - return - } - this.aborted = true +/** + * A faster collectASequenceOfCodePoints that only works when comparing a single character. + * @param {string} char + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position - return this[kHandler].onError(error) + if (idx === -1) { + position.position = input.length + return input.slice(start) } - onFinally () { - if (this.errorHandler) { - this.body.off('error', this.errorHandler) - this.errorHandler = null - } + position.position = idx + return input.slice(start, position.position) +} - if (this.endHandler) { - this.body.off('end', this.endHandler) - this.endHandler = null - } - } +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) - // TODO: adjust to support H2 - addHeader (key, value) { - processHeader(this, key, value) - return this - } + // 2. Return the percent-decoding of bytes. 
+ return percentDecode(bytes) +} - static [kHTTP1BuildRequest] (origin, opts, handler) { - // TODO: Migrate header parsing here, to make Requests - // HTTP agnostic - return new Request(origin, opts, handler) - } +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] - static [kHTTP2BuildRequest] (origin, opts, handler) { - const headers = opts.headers - opts = { ...opts, headers: null } + // 2. For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] - const request = new Request(origin, opts, handler) + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) - request.headers = {} + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) - if (Array.isArray(headers)) { - if (headers.length % 2 !== 0) { - throw new InvalidArgumentError('headers array must be even') - } - for (let i = 0; i < headers.length; i += 2) { - processHeader(request, headers[i], headers[i + 1], true) - } - } else if (headers && typeof headers === 'object') { - const keys = Object.keys(headers) - for (let i = 0; i < keys.length; i++) { - const key = keys[i] - processHeader(request, key, headers[key], true) - } - } else if (headers != null) { - throw new InvalidArgumentError('headers must be an object or an array') - } + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) - return request + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } } - static [kHTTP2CopyHeaders] (raw) { - const rawHeaders = raw.split('\r\n') - const headers = {} + // 3. Return output. + return Uint8Array.from(output) +} - for (const header of rawHeaders) { - const [key, value] = header.split(': ') +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) - if (value == null || value.length === 0) continue + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } - if (headers[key]) headers[key] += `,${value}` - else headers[key] = value - } + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) - return headers + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. 
+ // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' } -} -function processHeaderValue (key, val, skipAppend) { - if (val && typeof val === 'object') { - throw new InvalidArgumentError(`invalid ${key} header`) + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' } - val = val != null ? `${val}` : '' + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ - if (headerCharRegex.exec(val) !== null) { - throw new InvalidArgumentError(`invalid ${key} header`) - } + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) - return skipAppend ? val : `${key}: ${val}\r\n` -} + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) -function processHeader (request, key, val, skipAppend = false) { - if (val && (typeof val === 'object' && !Array.isArray(val))) { - throw new InvalidArgumentError(`invalid ${key} header`) - } else if (val === undefined) { - return + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. + if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' } - if ( - request.host === null && - key.length === 4 && - key.toLowerCase() === 'host' - ) { - if (headerCharRegex.exec(val) !== null) { - throw new InvalidArgumentError(`invalid ${key} header`) - } - // Consumed by Client - request.host = val - } else if ( - request.contentLength === null && - key.length === 14 && - key.toLowerCase() === 'content-length' - ) { - request.contentLength = parseInt(val, 10) - if (!Number.isFinite(request.contentLength)) { - throw new InvalidArgumentError('invalid content-length header') - } - } else if ( - request.contentType === null && - key.length === 12 && - key.toLowerCase() === 'content-type' - ) { - request.contentType = val - if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) - else request.headers += processHeaderValue(key, val) - } else if ( - key.length === 17 && - key.toLowerCase() === 'transfer-encoding' - ) { - throw new InvalidArgumentError('invalid transfer-encoding header') - } else if ( - key.length === 10 && - key.toLowerCase() === 'connection' - ) { - const value = typeof val === 'string' ? 
val.toLowerCase() : null - if (value !== 'close' && value !== 'keep-alive') { - throw new InvalidArgumentError('invalid connection header') - } else if (value === 'close') { - request.reset = true - } - } else if ( - key.length === 10 && - key.toLowerCase() === 'keep-alive' - ) { - throw new InvalidArgumentError('invalid keep-alive header') - } else if ( - key.length === 7 && - key.toLowerCase() === 'upgrade' - ) { - throw new InvalidArgumentError('invalid upgrade header') - } else if ( - key.length === 6 && - key.toLowerCase() === 'expect' - ) { - throw new NotSupportedError('expect header not supported') - } else if (tokenRegExp.exec(key) === null) { - throw new InvalidArgumentError('invalid header key') - } else { - if (Array.isArray(val)) { - for (let i = 0; i < val.length; i++) { - if (skipAppend) { - if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` - else request.headers[key] = processHeaderValue(key, val[i], skipAppend) - } else { - request.headers += processHeaderValue(key, val[i]) - } - } - } else { - if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) - else request.headers += processHeaderValue(key, val) - } - } -} + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() -module.exports = Request + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. + // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ -/***/ }), + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) -/***/ 2785: -/***/ ((module) => { + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. 
+ let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) -module.exports = { - kClose: Symbol('close'), - kDestroy: Symbol('destroy'), - kDispatch: Symbol('dispatch'), - kUrl: Symbol('url'), - kWriting: Symbol('writing'), - kResuming: Symbol('resuming'), - kQueue: Symbol('queue'), - kConnect: Symbol('connect'), - kConnecting: Symbol('connecting'), - kHeadersList: Symbol('headers list'), - kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), - kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), - kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), - kKeepAliveTimeoutValue: Symbol('keep alive timeout'), - kKeepAlive: Symbol('keep alive'), - kHeadersTimeout: Symbol('headers timeout'), - kBodyTimeout: Symbol('body timeout'), - kServerName: Symbol('server name'), - kLocalAddress: Symbol('local address'), - kHost: Symbol('host'), - kNoRef: Symbol('no ref'), - kBodyUsed: Symbol('used'), - kRunning: Symbol('running'), - kBlocking: Symbol('blocking'), - kPending: Symbol('pending'), - kSize: Symbol('size'), - kBusy: Symbol('busy'), - kQueued: Symbol('queued'), - kFree: Symbol('free'), - kConnected: Symbol('connected'), - kClosed: Symbol('closed'), - kNeedDrain: Symbol('need drain'), - kReset: Symbol('reset'), - kDestroyed: Symbol.for('nodejs.stream.destroyed'), - kMaxHeadersSize: Symbol('max headers size'), - kRunningIdx: Symbol('running index'), - kPendingIdx: Symbol('pending index'), - kError: Symbol('error'), - kClients: Symbol('clients'), - kClient: Symbol('client'), - kParser: Symbol('parser'), - kOnDestroyed: Symbol('destroy callbacks'), - kPipelining: Symbol('pipelining'), - kSocket: Symbol('socket'), - kHostHeader: Symbol('host header'), - kConnector: Symbol('connector'), - kStrictContentLength: Symbol('strict content length'), - kMaxRedirections: Symbol('maxRedirections'), - kMaxRequests: Symbol('maxRequestsPerClient'), - kProxy: Symbol('proxy agent options'), - kCounter: Symbol('socket request counter'), - kInterceptors: Symbol('dispatch interceptors'), - kMaxResponseSize: Symbol('max response size'), - kHTTP2Session: Symbol('http2Session'), - kHTTP2SessionState: Symbol('http2Session state'), - kHTTP2BuildRequest: Symbol('http2 build request'), - kHTTP1BuildRequest: Symbol('http1 build request'), - kHTTP2CopyHeaders: Symbol('http2 copy headers'), - kHTTPConnVersion: Symbol('http connection version'), - kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), - kConstruct: Symbol('constructable') -} + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue + } -/***/ }), + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ + } -/***/ 3983: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } -"use strict"; + // 7. Let parameterValue be null. + let parameterValue = null + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. 
+ parameterValue = collectAnHTTPQuotedString(input, position, true) -const assert = __nccwpck_require__(9491) -const { kDestroyed, kBodyUsed } = __nccwpck_require__(2785) -const { IncomingMessage } = __nccwpck_require__(3685) -const stream = __nccwpck_require__(2781) -const net = __nccwpck_require__(1808) -const { InvalidArgumentError } = __nccwpck_require__(8045) -const { Blob } = __nccwpck_require__(4300) -const nodeUtil = __nccwpck_require__(3837) -const { stringify } = __nccwpck_require__(3477) + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) -const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) -function nop () {} + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) -function isStream (obj) { - return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' -} + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue + } + } -// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) -function isBlobLike (object) { - return (Blob && object instanceof Blob) || ( - object && - typeof object === 'object' && - (typeof object.stream === 'function' || - typeof object.arrayBuffer === 'function') && - /^(Blob|File)$/.test(object[Symbol.toStringTag]) - ) + // 10. If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. + if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) + } + } + + // 12. Return mimeType. + return mimeType } -function buildURL (url, queryParams) { - if (url.includes('?') || url.includes('#')) { - throw new Error('Query params cannot be passed when url already contains "?" or "#".') +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line + + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') } - const stringified = stringify(queryParams) + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' + } - if (stringified) { - url += '?' + stringified + // 4. 
If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' } - return url + const binary = atob(data) + const bytes = new Uint8Array(binary.length) + + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) + } + + return bytes } -function parseURL (url) { - if (typeof url === 'string') { - url = new URL(url) +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string +/** + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue + */ +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position - if (!/^https?:/.test(url.origin || url.protocol)) { - throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') - } + // 2. Let value be the empty string. + let value = '' - return url - } + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') - if (!url || typeof url !== 'object') { - throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') - } + // 4. Advance position by 1. + position.position++ - if (!/^https?:/.test(url.origin || url.protocol)) { - throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') - } + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. + value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) - if (!(url instanceof URL)) { - if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { - throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break } - if (url.path != null && typeof url.path !== 'string') { - throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') - } + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] - if (url.pathname != null && typeof url.pathname !== 'string') { - throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') - } + // 4. Advance position by 1. + position.position++ - if (url.hostname != null && typeof url.hostname !== 'string') { - throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') - } + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break + } - if (url.origin != null && typeof url.origin !== 'string') { - throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') - } + // 2. Append the code point at position within input to value. + value += input[position.position] - const port = url.port != null - ? url.port - : (url.protocol === 'https:' ? 
443 : 80) - let origin = url.origin != null - ? url.origin - : `${url.protocol}//${url.hostname}:${port}` - let path = url.path != null - ? url.path - : `${url.pathname || ''}${url.search || ''}` + // 3. Advance position by 1. + position.position++ - if (origin.endsWith('/')) { - origin = origin.substring(0, origin.length - 1) - } + // 6. Otherwise: + } else { + // 1. Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') - if (path && !path.startsWith('/')) { - path = `/${path}` + // 2. Break. + break } - // new URL(path, origin) is unsafe when `path` contains an absolute URL - // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: - // If first parameter is a relative URL, second param is required, and will be used as the base URL. - // If first parameter is an absolute URL, a given second param will be ignored. - url = new URL(origin + path) } - return url + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value + } + + // 7. Return the code points from positionStart to position, + // inclusive, within input. + return input.slice(positionStart, position.position) } -function parseOrigin (url) { - url = parseURL(url) +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType - if (url.pathname !== '/' || url.search || url.hash) { - throw new InvalidArgumentError('invalid url') - } + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence - return url -} + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' -function getHostname (host) { - if (host[0] === '[') { - const idx = host.indexOf(']') + // 2. Append name to serialization. + serialization += name - assert(idx !== -1) - return host.substring(1, idx) + // 3. Append U+003D (=) to serialization. + serialization += '=' + + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') + + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value } - const idx = host.indexOf(':') - if (idx === -1) return host + // 3. Return serialization. 
+ return serialization +} - return host.substring(0, idx) +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char + */ +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' } -// IP addresses are not valid server names per RFC6066 -// > Currently, the only server names supported are DNS hostnames -function getServerName (host) { - if (!host) { - return null - } +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 - assert.strictEqual(typeof host, 'string') + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); + } - const servername = getHostname(host) - if (net.isIP(servername)) { - return '' + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); } - return servername + return str.slice(lead, trail + 1) } -function deepClone (obj) { - return JSON.parse(JSON.stringify(obj)) +/** + * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char + */ +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' } -function isAsyncIterable (obj) { - return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') -} +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 -function isIterable (obj) { - return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) -} + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); + } -function bodyLength (body) { - if (body == null) { - return 0 - } else if (isStream(body)) { - const state = body._readableState - return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) - ? state.length - : null - } else if (isBlobLike(body)) { - return body.size != null ? 
body.size : null - } else if (isBuffer(body)) { - return body.byteLength + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); } - return null + return str.slice(lead, trail + 1) } -function isDestroyed (stream) { - return !stream || !!(stream.destroyed || stream[kDestroyed]) +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType } -function isReadableAborted (stream) { - const state = stream && stream._readableState - return isDestroyed(stream) && state && !state.endEmitted -} -function destroy (stream, err) { - if (stream == null || !isStream(stream) || isDestroyed(stream)) { - return - } +/***/ }), - if (typeof stream.destroy === 'function') { - if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { - // See: https://github.com/nodejs/node/pull/38505/files - stream.socket = null +/***/ 78511: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { Blob, File: NativeFile } = __nccwpck_require__(14300) +const { types } = __nccwpck_require__(73837) +const { kState } = __nccwpck_require__(15861) +const { isBlobLike } = __nccwpck_require__(52538) +const { webidl } = __nccwpck_require__(21744) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +const { kEnumerableProperty } = __nccwpck_require__(83983) +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified } - stream.destroy(err) - } else if (err) { - process.nextTick((stream, err) => { - stream.emit('error', err) - }, stream, err) + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. 
+ // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t + } } - if (stream.destroyed !== true) { - stream[kDestroyed] = true + get name () { + webidl.brandCheck(this, File) + + return this[kState].name } -} -const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ -function parseKeepAliveTimeout (val) { - const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) - return m ? parseInt(m[1], 10) * 1000 : null + get lastModified () { + webidl.brandCheck(this, File) + + return this[kState].lastModified + } + + get type () { + webidl.brandCheck(this, File) + + return this[kState].type + } } -function parseHeaders (headers, obj = {}) { - // For H2 support - if (!Array.isArray(headers)) return headers +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check - for (let i = 0; i < headers.length; i += 2) { - const key = headers[i].toString().toLowerCase() - let val = obj[key] + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type + + // 2. Convert every character in t to ASCII lowercase. + // TODO + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. 
- if (!val) { - if (Array.isArray(headers[i + 1])) { - obj[key] = headers[i + 1].map(x => x.toString('utf8')) - } else { - obj[key] = headers[i + 1].toString('utf8') - } - } else { - if (!Array.isArray(val)) { - val = [val] - obj[key] = val - } - val.push(headers[i + 1].toString('utf8')) + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d } } - // See https://github.com/nodejs/node/pull/46528 - if ('content-length' in obj && 'content-disposition' in obj) { - obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + stream (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.stream(...args) } - return obj -} + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) -function parseRawHeaders (headers) { - const ret = [] - let hasContentLength = false - let contentDispositionIdx = -1 + return this[kState].blobLike.arrayBuffer(...args) + } - for (let n = 0; n < headers.length; n += 2) { - const key = headers[n + 0].toString() - const val = headers[n + 1].toString('utf8') + slice (...args) { + webidl.brandCheck(this, FileLike) - if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { - ret.push(key, val) - hasContentLength = true - } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { - contentDispositionIdx = ret.push(key, val) - 1 - } else { - ret.push(key, val) - } + return this[kState].blobLike.slice(...args) } - // See https://github.com/nodejs/node/pull/46528 - if (hasContentLength && contentDispositionIdx !== -1) { - ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + text (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.text(...args) } - return ret -} + get size () { + webidl.brandCheck(this, FileLike) -function isBuffer (buffer) { - // See, https://github.com/mcollina/undici/pull/319 - return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) -} + return this[kState].blobLike.size + } -function validateHandler (handler, method, upgrade) { - if (!handler || typeof handler !== 'object') { - throw new InvalidArgumentError('handler must be an object') + get type () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.type } - if (typeof handler.onConnect !== 'function') { - throw new InvalidArgumentError('invalid onConnect method') + get name () { + webidl.brandCheck(this, FileLike) + + return this[kState].name } - if (typeof handler.onError !== 'function') { - throw new InvalidArgumentError('invalid onError method') + get lastModified () { + webidl.brandCheck(this, FileLike) + + return this[kState].lastModified } - if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { - throw new InvalidArgumentError('invalid onBodySent method') + get [Symbol.toStringTag] () { + return 'File' } +} - if (upgrade || method === 'CONNECT') { - if (typeof handler.onUpgrade !== 'function') { - throw new InvalidArgumentError('invalid onUpgrade method') - } - } else { - if (typeof handler.onHeaders !== 'function') { - throw new InvalidArgumentError('invalid onHeaders method') - } +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) - if (typeof handler.onData !== 'function') { - throw new InvalidArgumentError('invalid onData method') +webidl.converters.Blob = 
webidl.interfaceConverter(Blob) + +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } - if (typeof handler.onComplete !== 'function') { - throw new InvalidArgumentError('invalid onComplete method') + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } + } -} -// A body is disturbed if it has been read from and it cannot -// be re-used without losing state or data. -function isDisturbed (body) { - return !!(body && ( - stream.isDisturbed - ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? - : body[kBodyUsed] || - body.readableDidRead || - (body._readableState && body._readableState.dataEmitted) || - isReadableAborted(body) - )) + return webidl.converters.USVString(V, opts) } -function isErrored (body) { - return !!(body && ( - stream.isErrored - ? stream.isErrored(body) - : /state: 'errored'/.test(nodeUtil.inspect(body) - ))) -} +webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) -function isReadable (body) { - return !!(body && ( - stream.isReadable - ? stream.isReadable(body) - : /state: 'readable'/.test(nodeUtil.inspect(body) - ))) -} +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() + } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() -function getSocketInfo (socket) { - return { - localAddress: socket.localAddress, - localPort: socket.localPort, - remoteAddress: socket.remoteAddress, - remotePort: socket.remotePort, - remoteFamily: socket.remoteFamily, - timeout: socket.timeout, - bytesWritten: socket.bytesWritten, - bytesRead: socket.bytesRead - } -} + if (value !== 'native') { + value = 'transparent' + } -async function * convertIterableToBuffer (iterable) { - for await (const chunk of iterable) { - yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + return value + }, + defaultValue: 'transparent' } -} +]) -let ReadableStream -function ReadableStreamFrom (iterable) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) - } +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] - if (ReadableStream.from) { - return ReadableStream.from(convertIterableToBuffer(iterable)) - } + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element - let iterator - return new ReadableStream( - { - async start () { - iterator = iterable[Symbol.asyncIterator]() - }, - async pull (controller) { - const { done, value } = await iterator.next() - if (done) { - queueMicrotask(() => { - controller.close() - }) - } else { - const buf = Buffer.isBuffer(value) ?
value : Buffer.from(value) - controller.enqueue(new Uint8Array(buf)) - } - return controller.desiredSize > 0 - }, - async cancel (reason) { - await iterator.return() + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) } - }, - 0 - ) -} - -// The chunk should be a FormData instance and contains -// all the required methods. -function isFormDataLike (object) { - return ( - object && - typeof object === 'object' && - typeof object.append === 'function' && - typeof object.delete === 'function' && - typeof object.get === 'function' && - typeof object.getAll === 'function' && - typeof object.has === 'function' && - typeof object.set === 'function' && - object[Symbol.toStringTag] === 'FormData' - ) -} -function throwIfAborted (signal) { - if (!signal) { return } - if (typeof signal.throwIfAborted === 'function') { - signal.throwIfAborted() - } else { - if (signal.aborted) { - // DOMException not available < v17.0.0 - const err = new Error('The operation was aborted') - err.name = 'AbortError' - throw err + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. + if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) + } else { + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) + } + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) } } -} -function addAbortListener (signal, listener) { - if ('addEventListener' in signal) { - signal.addEventListener('abort', listener, { once: true }) - return () => signal.removeEventListener('abort', listener) - } - signal.addListener('abort', listener) - return () => signal.removeListener('abort', listener) + // 3. Return bytes. + return bytes } -const hasToWellFormed = !!String.prototype.toWellFormed - /** - * @param {string} val + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s */ -function toUSVString (val) { - if (hasToWellFormed) { - return `${val}`.toWellFormed() - } else if (nodeUtil.toUSVString) { - return nodeUtil.toUSVString(val) +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' + + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' } - return `${val}` + return s.replace(/\r?\n/g, nativeLineEnding) } -// Parsed accordingly to RFC 9110 -// https://www.rfc-editor.org/rfc/rfc9110#field.content-range -function parseRangeHeader (range) { - if (range == null || range === '') return { start: 0, end: null, size: null } - - const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null - return m - ? { - start: parseInt(m[1]), - end: m[2] ? parseInt(m[2]) : null, - size: m[3] ? parseInt(m[3]) : null - } - : null +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. 
See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) } -const kEnumerableProperty = Object.create(null) -kEnumerableProperty.enumerable = true - -module.exports = { - kEnumerableProperty, - nop, - isDisturbed, - isErrored, - isReadable, - toUSVString, - isReadableAborted, - isBlobLike, - parseOrigin, - parseURL, - getServerName, - isStream, - isIterable, - isAsyncIterable, - isDestroyed, - parseRawHeaders, - parseHeaders, - parseKeepAliveTimeout, - destroy, - bodyLength, - deepClone, - ReadableStreamFrom, - isBuffer, - validateHandler, - getSocketInfo, - isFormDataLike, - buildURL, - throwIfAborted, - addAbortListener, - parseRangeHeader, - nodeMajor, - nodeMinor, - nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), - safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] -} +module.exports = { File, FileLike, isFileLike } /***/ }), -/***/ 4839: +/***/ 72015: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const Dispatcher = __nccwpck_require__(412) -const { - ClientDestroyedError, - ClientClosedError, - InvalidArgumentError -} = __nccwpck_require__(8045) -const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(2785) +const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(52538) +const { kState } = __nccwpck_require__(15861) +const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(78511) +const { webidl } = __nccwpck_require__(21744) +const { Blob, File: NativeFile } = __nccwpck_require__(14300) -const kDestroyed = Symbol('destroyed') -const kClosed = Symbol('closed') -const kOnDestroyed = Symbol('onDestroyed') -const kOnClosed = Symbol('onClosed') -const kInterceptedDispatch = Symbol('Intercepted Dispatch') +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile -class DispatcherBase extends Dispatcher { - constructor () { - super() +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) + } - this[kDestroyed] = false - this[kOnDestroyed] = null - this[kClosed] = false - this[kOnClosed] = [] + this[kState] = [] } - get destroyed () { - return this[kDestroyed] - } + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) - get closed () { - return this[kClosed] + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. 
+ this[kState].push(entry) } - get interceptors () { - return this[kInterceptors] + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. + this[kState] = this[kState].filter(entry => entry.name !== name) } - set interceptors (newInterceptors) { - if (newInterceptors) { - for (let i = newInterceptors.length - 1; i >= 0; i--) { - const interceptor = this[kInterceptors][i] - if (typeof interceptor !== 'function') { - throw new InvalidArgumentError('interceptor must be an function') - } - } + get (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null } - this[kInterceptors] = newInterceptors + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value + } + + getAll (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. + return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) } - close (callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - this.close((err, data) => { - return err ? reject(err) : resolve(data) - }) - }) - } + has (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) + + name = webidl.converters.USVString(name) + + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 + } - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) - if (this[kDestroyed]) { - queueMicrotask(() => callback(new ClientDestroyedError(), null)) - return - } + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) - if (this[kClosed]) { - if (this[kOnClosed]) { - this[kOnClosed].push(callback) - } else { - queueMicrotask(() => callback(null, null)) - } - return + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) } - this[kClosed] = true - this[kOnClosed].push(callback) + // The set(name, value) and set(name, blobValue, filename) method steps + // are: - const onClosed = () => { - const callbacks = this[kOnClosed] - this[kOnClosed] = null - for (let i = 0; i < callbacks.length; i++) { - callbacks[i](null, null) - } - } + // 1. Let value be value if given; otherwise blobValue. - // Should not error. - this[kClose]() - .then(() => this.destroy()) - .then(() => { - queueMicrotask(onClosed) - }) - } + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? 
webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined - destroy (err, callback) { - if (typeof err === 'function') { - callback = err - err = null - } + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) - if (callback === undefined) { - return new Promise((resolve, reject) => { - this.destroy(err, (err, data) => { - return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data) - }) - }) + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] + } else { + // 4. Otherwise, append entry to this’s entry list. + this[kState].push(entry) } + } - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + entries () { + webidl.brandCheck(this, FormData) - if (this[kDestroyed]) { - if (this[kOnDestroyed]) { - this[kOnDestroyed].push(callback) - } else { - queueMicrotask(() => callback(null, null)) - } - return - } + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } - if (!err) { - err = new ClientDestroyedError() - } + keys () { + webidl.brandCheck(this, FormData) - this[kDestroyed] = true - this[kOnDestroyed] = this[kOnDestroyed] || [] - this[kOnDestroyed].push(callback) + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } - const onDestroyed = () => { - const callbacks = this[kOnDestroyed] - this[kOnDestroyed] = null - for (let i = 0; i < callbacks.length; i++) { - callbacks[i](null, null) - } - } + values () { + webidl.brandCheck(this, FormData) - // Should not error. - this[kDestroy](err).then(() => { - queueMicrotask(onDestroyed) - }) + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) } - [kInterceptedDispatch] (opts, handler) { - if (!this[kInterceptors] || this[kInterceptors].length === 0) { - this[kInterceptedDispatch] = this[kDispatch] - return this[kDispatch](opts, handler) + /** + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." 
+ ) } - let dispatch = this[kDispatch].bind(this) - for (let i = this[kInterceptors].length - 1; i >= 0; i--) { - dispatch = this[kInterceptors][i](dispatch) + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) } - this[kInterceptedDispatch] = dispatch - return dispatch(opts, handler) } +} - dispatch (opts, handler) { - if (!handler || typeof handler !== 'object') { - throw new InvalidArgumentError('handler must be an object') - } +FormData.prototype[Symbol.iterator] = FormData.prototype.entries - try { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('opts must be an object.') - } +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) - if (this[kDestroyed] || this[kOnDestroyed]) { - throw new ClientDestroyedError() - } +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') - if (this[kClosed]) { - throw new ClientClosedError() - } + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: - return this[kInterceptedDispatch](opts, handler) - } catch (err) { - if (typeof handler.onError !== 'function') { - throw new InvalidArgumentError('invalid onError method') - } + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } - handler.onError(err) + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified + } - return false + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) } } + + // 4. Return an entry whose name is name and whose value is value. + return { name, value } } -module.exports = DispatcherBase +module.exports = { FormData } /***/ }), -/***/ 412: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 71246: +/***/ ((module) => { "use strict"; -const EventEmitter = __nccwpck_require__(2361) +// In case of breaking changes, increase the version +// number to avoid conflicts. 
+const globalOrigin = Symbol.for('undici.globalOrigin.1') -class Dispatcher extends EventEmitter { - dispatch () { - throw new Error('not implemented') - } +function getGlobalOrigin () { + return globalThis[globalOrigin] +} - close () { - throw new Error('not implemented') +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) + + return } - destroy () { - throw new Error('not implemented') + const parsedURL = new URL(newOrigin) + + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) } + + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) } -module.exports = Dispatcher +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} /***/ }), -/***/ 9990: +/***/ 10554: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch + -const Busboy = __nccwpck_require__(3438) -const util = __nccwpck_require__(3983) +const { kHeadersList, kConstruct } = __nccwpck_require__(72785) +const { kGuard } = __nccwpck_require__(15861) +const { kEnumerableProperty } = __nccwpck_require__(83983) const { - ReadableStreamFrom, - isBlobLike, - isReadableStreamLike, - readableStreamClose, - createDeferredPromise, - fullyReadBody -} = __nccwpck_require__(2538) -const { FormData } = __nccwpck_require__(2015) -const { kState } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { DOMException, structuredClone } = __nccwpck_require__(1037) -const { Blob, File: NativeFile } = __nccwpck_require__(4300) -const { kBodyUsed } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { isErrored } = __nccwpck_require__(3983) -const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) -const { File: UndiciFile } = __nccwpck_require__(8511) -const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = __nccwpck_require__(52538) +const { webidl } = __nccwpck_require__(21744) +const assert = __nccwpck_require__(39491) -let ReadableStream = globalThis.ReadableStream +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') -/** @type {globalThis['File']} */ -const File = NativeFile ?? UndiciFile -const textEncoder = new TextEncoder() -const textDecoder = new TextDecoder() +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} -// https://fetch.spec.whatwg.org/#concept-bodyinit-extract -function extractBody (object, keepalive = false) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) - } +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length - // 1. Let stream be null. 
- let stream = null + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i - // 2. If object is a ReadableStream object, then set stream to object. - if (object instanceof ReadableStream) { - stream = object - } else if (isBlobLike(object)) { - // 3. Otherwise, if object is a Blob object, set stream to the - // result of running object’s get stream. - stream = object.stream() - } else { - // 4. Otherwise, set stream to a new ReadableStream object, and set - // up stream. - stream = new ReadableStream({ - async pull (controller) { - controller.enqueue( - typeof source === 'string' ? textEncoder.encode(source) : source - ) - queueMicrotask(() => readableStreamClose(controller)) - }, - start () {}, - type: undefined - }) - } + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} - // 5. Assert: stream is a ReadableStream object. - assert(isReadableStreamLike(stream)) +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: - // 6. Let action be null. - let action = null + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } - // 7. Let source be null. - let source = null + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw - // 8. Let length be null. - let length = null + // 2. Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>'] + }) + } +} - // 9. Let type be null. - let type = null +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) - // 10. Switch on object: - if (typeof object === 'string') { - // Set source to the UTF-8 encoding of object. - // Note: setting source to a Uint8Array here breaks some mocking assumptions. - source = object + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } - // Set type to `text/plain;charset=UTF-8`. - type = 'text/plain;charset=UTF-8' - } else if (object instanceof URLSearchParams) { - // URLSearchParams + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return.
+ // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } - // spec says to run application/x-www-form-urlencoded on body.list - // this is implemented in Node.js as apart of an URLSearchParams instance toString method - // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 - // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. - // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. - source = object.toString() + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) - // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. - type = 'application/x-www-form-urlencoded;charset=UTF-8' - } else if (isArrayBuffer(object)) { - // BufferSource/ArrayBuffer + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} - // Set source to a copy of the bytes held by object. - source = new Uint8Array(object.slice()) - } else if (ArrayBuffer.isView(object)) { - // BufferSource/ArrayBufferView +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null - // Set source to a copy of the bytes held by object. - source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) - } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` - const prefix = `--${boundary}\r\nContent-Disposition: form-data` + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] + } else { + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null + } + } - /*! formdata-polyfill. MIT License. Jimmy Wärting */ - const escape = (str) => - str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') - const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() - // Set action to this step: run the multipart/form-data - // encoding algorithm, with object’s entry list and UTF-8. - // - This ensures that the body is immutable and can't be changed afterwords - // - That the content-length is calculated in advance. - // - And that all parts are pre-encoded and ready to be sent. 
+ return this[kHeadersMap].has(name) + } - const blobParts = [] - const rn = new Uint8Array([13, 10]) // '\r\n' - length = 0 - let hasUnknownSizeValue = false + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } - for (const [name, value] of object) { - if (typeof value === 'string') { - const chunk = textEncoder.encode(prefix + - `; name="${escape(normalizeLinefeeds(name))}"` + - `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) - blobParts.push(chunk) - length += chunk.byteLength - } else { - const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + - (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + - `Content-Type: ${ - value.type || 'application/octet-stream' - }\r\n\r\n`) - blobParts.push(chunk, value, rn) - if (typeof value.size === 'number') { - length += chunk.byteLength + value.size + rn.byteLength - } else { - hasUnknownSizeValue = true - } - } + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null + + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) + + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) } - const chunk = textEncoder.encode(`--${boundary}--`) - blobParts.push(chunk) - length += chunk.byteLength - if (hasUnknownSizeValue) { - length = null + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) } + } - // Set source to object. - source = object + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() - action = async function * () { - for (const part of blobParts) { - if (part.stream) { - yield * part.stream() - } else { - yield part - } - } + if (lowercaseName === 'set-cookie') { + this.cookies = [value] } - // Set type to `multipart/form-data; boundary=`, - // followed by the multipart/form-data boundary string generated - // by the multipart/form-data encoding algorithm. - type = 'multipart/form-data; boundary=' + boundary - } else if (isBlobLike(object)) { - // Blob - - // Set source to object. - source = object + // 1. If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. + this[kHeadersMap].set(lowercaseName, { name, value }) + } - // Set length to object’s size. - length = object.size + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null - // If object’s type attribute is not the empty byte sequence, set - // type to its value. - if (object.type) { - type = object.type - } - } else if (typeof object[Symbol.asyncIterator] === 'function') { - // If keepalive is true, then throw a TypeError. - if (keepalive) { - throw new TypeError('keepalive') - } + name = name.toLowerCase() - // If object is disturbed or locked, then throw a TypeError. 
- if (util.isDisturbed(object) || object.locked) { - throw new TypeError( - 'Response body object should not be disturbed or locked' - ) + if (name === 'set-cookie') { + this.cookies = null } - stream = - object instanceof ReadableStream ? object : ReadableStreamFrom(object) + this[kHeadersMap].delete(name) } - // 11. If source is a byte sequence, then set action to a - // step that returns source and length to source’s length. - if (typeof source === 'string' || util.isBuffer(source)) { - length = Buffer.byteLength(source) + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value } - // 12. If action is non-null, then run these steps in in parallel: - if (action != null) { - // Run action. - let iterator - stream = new ReadableStream({ - async start () { - iterator = action(object)[Symbol.asyncIterator]() - }, - async pull (controller) { - const { value, done } = await iterator.next() - if (done) { - // When running action is done, close stream. - queueMicrotask(() => { - controller.close() - }) - } else { - // Whenever one or more bytes are available and stream is not errored, - // enqueue a Uint8Array wrapping an ArrayBuffer containing the available - // bytes into stream. - if (!isErrored(stream)) { - controller.enqueue(new Uint8Array(value)) - } - } - return controller.desiredSize > 0 - }, - async cancel (reason) { - await iterator.return() - }, - type: undefined - }) + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } } - // 13. Let body be a body whose stream is stream, source is source, - // and length is length. - const body = { stream, source, length } + get entries () { + const headers = {} - // 14. Return (body, type). - return [body, type] -} + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value + } + } -// https://fetch.spec.whatwg.org/#bodyinit-safely-extract -function safelyExtractBody (object, keepalive = false) { - if (!ReadableStream) { - // istanbul ignore next - ReadableStream = (__nccwpck_require__(5356).ReadableStream) + return headers } +} - // To safely extract a body and a `Content-Type` value from - // a byte sequence or BodyInit object object, run these steps: +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() - // 1. If object is a ReadableStream object, then: - if (object instanceof ReadableStream) { - // Assert: object is neither disturbed nor locked. - // istanbul ignore next - assert(!util.isDisturbed(object), 'The body has already been consumed.') - // istanbul ignore next - assert(!object.locked, 'The stream is locked.') - } + // The new Headers(init) constructor steps are: - // 2. Return the results of extracting object. - return extractBody(object, keepalive) -} + // 1. Set this’s guard to "none". + this[kGuard] = 'none' -function cloneBody (body) { - // To clone a body body, run these steps: + // 2. If init is given, then fill this with init. 
+ if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) + } + } - // https://fetch.spec.whatwg.org/#concept-body-clone + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) - // 1. Let « out1, out2 » be the result of teeing body’s stream. - const [out1, out2] = body.stream.tee() - const out2Clone = structuredClone(out2, { transfer: [out2] }) - // This, for whatever reasons, unrefs out2Clone which allows - // the process to exit by itself. - const [, finalClone] = out2Clone.tee() + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) - // 2. Set body’s stream to out1. - body.stream = out1 + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) - // 3. Return a body whose stream is out2 and other members are copied from body. - return { - stream: finalClone, - length: body.length, - source: body.source + return appendHeader(this, name, value) } -} -async function * consumeBody (body) { - if (body) { - if (isUint8Array(body)) { - yield body - } else { - const stream = body.stream + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) - if (util.isDisturbed(stream)) { - throw new TypeError('The body has already been consumed.') - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) - if (stream.locked) { - throw new TypeError('The stream is locked.') - } + name = webidl.converters.ByteString(name) - // Compat. - stream[kBodyUsed] = true + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } - yield * stream + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO } - } -} -function throwIfAborted (state) { - if (state.aborted) { - throw new DOMException('The operation was aborted.', 'AbortError') + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } + + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. + this[kHeadersList].delete(name) } -} -function bodyMixinMethods (instance) { - const methods = { - blob () { - // The blob() method steps are to return the result of - // running consume body with this and the following step - // given a byte sequence bytes: return a Blob whose - // contents are bytes and whose type attribute is this’s - // MIME type. 
- return specConsumeBody(this, (bytes) => { - let mimeType = bodyMimeType(this) + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) - if (mimeType === 'failure') { - mimeType = '' - } else if (mimeType) { - mimeType = serializeAMimeType(mimeType) - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) - // Return a Blob whose contents are bytes and type attribute - // is mimeType. - return new Blob([bytes], { type: mimeType }) - }, instance) - }, + name = webidl.converters.ByteString(name) - arrayBuffer () { - // The arrayBuffer() method steps are to return the result - // of running consume body with this and the following step - // given a byte sequence bytes: return a new ArrayBuffer - // whose contents are bytes. - return specConsumeBody(this, (bytes) => { - return new Uint8Array(bytes).buffer - }, instance) - }, + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } - text () { - // The text() method steps are to return the result of running - // consume body with this and UTF-8 decode. - return specConsumeBody(this, utf8DecodeBytes, instance) - }, + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } - json () { - // The json() method steps are to return the result of running - // consume body with this and parse JSON from bytes. - return specConsumeBody(this, parseJSONFromBytes, instance) - }, + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) - async formData () { - webidl.brandCheck(this, instance) + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) - throwIfAborted(this[kState]) + name = webidl.converters.ByteString(name) - const contentType = this.headers.get('Content-Type') + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } - // If mimeType’s essence is "multipart/form-data", then: - if (/multipart\/form-data/.test(contentType)) { - const headers = {} - for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) + } - const responseFormData = new FormData() + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) - let busboy + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) - try { - busboy = new Busboy({ - headers, - preservePath: true - }) - } catch (err) { - throw new DOMException(`${err}`, 'AbortError') - } + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) - busboy.on('field', (name, value) => { - responseFormData.append(name, value) - }) - busboy.on('file', (name, value, filename, encoding, mimeType) => { - const chunks = [] + // 1. Normalize value. + value = headerValueNormalize(value) - if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { - let base64chunk = '' + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. 
+ if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } - value.on('data', (chunk) => { - base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } - const end = base64chunk.length - base64chunk.length % 4 - chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) + } - base64chunk = base64chunk.slice(end) - }) - value.on('end', () => { - chunks.push(Buffer.from(base64chunk, 'base64')) - responseFormData.append(name, new File(chunks, filename, { type: mimeType })) - }) - } else { - value.on('data', (chunk) => { - chunks.push(chunk) - }) - value.on('end', () => { - responseFormData.append(name, new File(chunks, filename, { type: mimeType })) - }) - } - }) + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) - const busboyResolve = new Promise((resolve, reject) => { - busboy.on('finish', resolve) - busboy.on('error', (err) => reject(new TypeError(err))) - }) + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. - if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) - busboy.end() - await busboyResolve + const list = this[kHeadersList].cookies - return responseFormData - } else if (/application\/x-www-form-urlencoded/.test(contentType)) { - // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + if (list) { + return [...list] + } - // 1. Let entries be the result of parsing bytes. - let entries - try { - let text = '' - // application/x-www-form-urlencoded parser will keep the BOM. - // https://url.spec.whatwg.org/#concept-urlencoded-parser - // Note that streaming decoder is stateful and cannot be reused - const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + return [] + } - for await (const chunk of consumeBody(this[kState].body)) { - if (!isUint8Array(chunk)) { - throw new TypeError('Expected Uint8Array chunk') - } - text += streamingDecoder.decode(chunk, { stream: true }) - } - text += streamingDecoder.decode() - entries = new URLSearchParams(text) - } catch (err) { - // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. - // 2. If entries is failure, then throw a TypeError. 
- throw Object.assign(new TypeError(), { cause: err }) - } + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } - // 3. Return a new FormData object whose entries are entries. - const formData = new FormData() - for (const [name, value] of entries) { - formData.append(name, value) + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] + + // 2. Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies + + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) } - return formData } else { - // Wait a tick before checking if the request has been aborted. - // Otherwise, a TypeError can be thrown when an AbortError should. - await Promise.resolve() + // 2. Otherwise: - throwIfAborted(this[kState]) + // 1. Let value be the result of getting name from list. - // Otherwise, throw a TypeError. - throw webidl.errors.exception({ - header: `${instance.name}.formData`, - message: 'Could not parse content as FormData.' - }) + // 2. Assert: value is non-null. + assert(value !== null) + + // 3. Append (name, value) to headers. + headers.push([name, value]) } } + + this[kHeadersList][kHeadersSortedMap] = headers + + // 4. Return headers. + return headers } - return methods -} + keys () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } -function mixinBody (prototype) { - Object.assign(prototype.prototype, bodyMixinMethods(prototype)) -} + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) + } -/** - * @see https://fetch.spec.whatwg.org/#concept-body-consume-body - * @param {Response|Request} object - * @param {(value: unknown) => unknown} convertBytesToJSValue - * @param {Response|Request} instance - */ -async function specConsumeBody (object, convertBytesToJSValue, instance) { - webidl.brandCheck(object, instance) + values () { + webidl.brandCheck(this, Headers) - throwIfAborted(object[kState]) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } - // 1. If object is unusable, then return a promise rejected - // with a TypeError. - if (bodyUnusable(object[kState].body)) { - throw new TypeError('Body is unusable') + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) } - // 2. Let promise be a new promise. - const promise = createDeferredPromise() - - // 3. Let errorSteps given error be to reject promise with error. - const errorSteps = (error) => promise.reject(error) + entries () { + webidl.brandCheck(this, Headers) - // 4. 
Let successSteps given a byte sequence data be to resolve - // promise with the result of running convertBytesToJSValue - // with data. If that threw an exception, then run errorSteps - // with that exception. - const successSteps = (data) => { - try { - promise.resolve(convertBytesToJSValue(data)) - } catch (e) { - errorSteps(e) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') } - } - // 5. If object’s body is null, then run successSteps with an - // empty byte sequence. - if (object[kState].body == null) { - successSteps(new Uint8Array()) - return promise.promise + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) } - // 6. Otherwise, fully read object’s body given successSteps, - // errorSteps, and object’s relevant global object. - await fullyReadBody(object[kState].body, successSteps, errorSteps) + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) - // 7. Return promise. - return promise.promise -} + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) -// https://fetch.spec.whatwg.org/#body-unusable -function bodyUnusable (body) { - // An object including the Body interface mixin is - // said to be unusable if its body is non-null and - // its body’s stream is disturbed or locked. - return body != null && (body.stream.locked || util.isDisturbed(body.stream)) -} + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." + ) + } -/** - * @see https://encoding.spec.whatwg.org/#utf-8-decode - * @param {Buffer} buffer - */ -function utf8DecodeBytes (buffer) { - if (buffer.length === 0) { - return '' + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } } - // 1. Let buffer be the result of peeking three bytes from - // ioQueue, converted to a byte sequence. + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) - // 2. If buffer is 0xEF 0xBB 0xBF, then read three - // bytes from ioQueue. (Do nothing with those bytes.) - if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { - buffer = buffer.subarray(3) + return this[kHeadersList] } - - // 3. Process a queue with an instance of UTF-8’s - // decoder, ioQueue, output, and "replacement". - const output = textDecoder.decode(buffer) - - // 4. Return output. 
- return output } -/** - * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value - * @param {Uint8Array} bytes - */ -function parseJSONFromBytes (bytes) { - return JSON.parse(utf8DecodeBytes(bytes)) -} +Headers.prototype[Symbol.iterator] = Headers.prototype.entries -/** - * @see https://fetch.spec.whatwg.org/#concept-body-mime-type - * @param {import('./response').Response|import('./request').Request} object - */ -function bodyMimeType (object) { - const { headersList } = object[kState] - const contentType = headersList.get('content-type') +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) - if (contentType === null) { - return 'failure' +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence>'](V) + } + + return webidl.converters['record'](V) } - return parseMIMEType(contentType) + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) } module.exports = { - extractBody, - safelyExtractBody, - cloneBody, - mixinBody + fill, + Headers, + HeadersList } /***/ }), -/***/ 1037: +/***/ 74881: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch -const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267) -const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] -const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = __nccwpck_require__(27823) +const { Headers } = __nccwpck_require__(10554) +const { Request, makeRequest } = __nccwpck_require__(48359) +const zlib = __nccwpck_require__(59796) +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = __nccwpck_require__(52538) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(15861) +const assert = __nccwpck_require__(39491) +const { safelyExtractBody } = __nccwpck_require__(41472) +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = __nccwpck_require__(41037) +const { kHeadersList } = __nccwpck_require__(72785) +const EE = __nccwpck_require__(82361) +const { Readable, pipeline } = __nccwpck_require__(12781) +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(83983) +const { dataURLProcessor, serializeAMimeType } = 
__nccwpck_require__(685) +const { TransformStream } = __nccwpck_require__(35356) +const { getGlobalDispatcher } = __nccwpck_require__(21892) +const { webidl } = __nccwpck_require__(21744) +const { STATUS_CODES } = __nccwpck_require__(13685) +const GET_OR_HEAD = ['GET', 'HEAD'] -const nullBodyStatus = [101, 204, 205, 304] +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream -const redirectStatus = [301, 302, 303, 307, 308] -const redirectStatusSet = new Set(redirectStatus) +class Fetch extends EE { + constructor (dispatcher) { + super() -// https://fetch.spec.whatwg.org/#block-bad-port -const badPorts = [ - '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', - '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', - '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', - '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', - '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', - '10080' -] + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated listeners get added per request, + // but only 1 gets removed. If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } -const badPortsSet = new Set(badPorts) + terminate (reason) { + if (this.state !== 'ongoing') { + return + } -// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies -const referrerPolicy = [ - '', - 'no-referrer', - 'no-referrer-when-downgrade', - 'same-origin', - 'origin', - 'strict-origin', - 'origin-when-cross-origin', - 'strict-origin-when-cross-origin', - 'unsafe-url' -] -const referrerPolicySet = new Set(referrerPolicy) + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) + } -const requestRedirect = ['follow', 'manual', 'error'] + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } -const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] -const safeMethodsSet = new Set(safeMethods) + // 1. Set controller’s state to "aborted". + this.state = 'aborted' -const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } -const requestCredentials = ['omit', 'same-origin', 'include'] + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). -const requestCache = [ - 'default', - 'no-store', - 'reload', - 'no-cache', - 'force-cache', - 'only-if-cached' -] + // 5. Set controller’s serialized abort reason to serializedError. 
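
// The Fetch controller's terminate()/abort() steps above are what ultimately
// reject a caller's promise when its AbortSignal fires. A sketch of the caller
// side (Node 18+ globals assumed; example.com is just a placeholder URL):
const abortController = new AbortController()
const pending = fetch('https://example.com/', { signal: abortController.signal })
abortController.abort(new Error('no longer needed'))
pending.catch(err => console.log(err.message)) // "no longer needed"
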
+ this.serializedAbortReason = error -// https://fetch.spec.whatwg.org/#request-body-header-name -const requestBodyHeader = [ - 'content-encoding', - 'content-language', - 'content-location', - 'content-type', - // See https://github.com/nodejs/undici/issues/2021 - // 'Content-Length' is a forbidden header name, which is typically - // removed in the Headers implementation. However, undici doesn't - // filter out headers, so we add it here. - 'content-length' -] + this.connection?.destroy(error) + this.emit('terminated', error) + } +} -// https://fetch.spec.whatwg.org/#enumdef-requestduplex -const requestDuplex = [ - 'half' -] +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) -// http://fetch.spec.whatwg.org/#forbidden-method -const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] -const forbiddenMethodsSet = new Set(forbiddenMethods) + // 1. Let p be a new promise. + const p = createDeferredPromise() -const subresource = [ - 'audio', - 'audioworklet', - 'font', - 'image', - 'manifest', - 'paintworklet', - 'script', - 'style', - 'track', - 'video', - 'xslt', - '' -] -const subresourceSet = new Set(subresource) + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject -/** @type {globalThis['DOMException']} */ -const DOMException = globalThis.DOMException ?? (() => { - // DOMException was only made a global in Node v17.0.0, - // but fetch supports >= v16.8. try { - atob('~') - } catch (err) { - return Object.getPrototypeOf(err).constructor + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise } -})() -let channel + // 3. Let request be requestObject’s request. + const request = requestObject[kState] -/** @type {globalThis['structuredClone']} */ -const structuredClone = - globalThis.structuredClone ?? - // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js - // structuredClone was added in v17.0.0, but fetch supports v16.8 - function structuredClone (value, options = undefined) { - if (arguments.length === 0) { - throw new TypeError('missing argument') - } + // 4. If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) - if (!channel) { - channel = new MessageChannel() - } - channel.port1.unref() - channel.port2.unref() - channel.port1.postMessage(value, options?.transfer) - return receiveMessageOnPort(channel.port2).message + // 2. Return p. + return p.promise } -module.exports = { - DOMException, - structuredClone, - subresource, - forbiddenMethods, - requestBodyHeader, - referrerPolicy, - requestRedirect, - requestMode, - requestCredentials, - requestCache, - redirectStatus, - corsSafeListedMethods, - nullBodyStatus, - safeMethods, - badPorts, - requestDuplex, - subresourceSet, - badPortsSet, - redirectStatusSet, - corsSafeListedMethodsSet, - safeMethodsSet, - forbiddenMethodsSet, - referrerPolicySet -} - - -/***/ }), + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject -/***/ 685: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 6. 
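
// Step 4 above is the fast path for a signal that is already aborted: fetch()
// rejects with the signal's abort reason before any request work happens.
// Sketch (assumes Node 18+, where AbortSignal.abort() is available):
fetch('https://example.com/', { signal: AbortSignal.abort(new Error('cancelled early')) })
  .catch(err => console.log(err.message)) // "cancelled early"
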
If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' + } -const assert = __nccwpck_require__(9491) -const { atob } = __nccwpck_require__(4300) -const { isomorphicDecode } = __nccwpck_require__(2538) + // 7. Let responseObject be null. + let responseObject = null -const encoder = new TextEncoder() + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null -/** - * @see https://mimesniff.spec.whatwg.org/#http-token-code-point - */ -const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ -const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line -/** - * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point - */ -const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line + // 9. Let locallyAborted be false. + let locallyAborted = false -// https://fetch.spec.whatwg.org/#data-url-processor -/** @param {URL} dataURL */ -function dataURLProcessor (dataURL) { - // 1. Assert: dataURL’s scheme is "data". - assert(dataURL.protocol === 'data:') + // 10. Let controller be null. + let controller = null - // 2. Let input be the result of running the URL - // serializer on dataURL with exclude fragment - // set to true. - let input = URLSerializer(dataURL, true) + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. + locallyAborted = true - // 3. Remove the leading "data:" string from input. - input = input.slice(5) + // 2. Assert: controller is non-null. + assert(controller != null) - // 4. Let position point at the start of input. - const position = { position: 0 } + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) - // 5. Let mimeType be the result of collecting a - // sequence of code points that are not equal - // to U+002C (,), given position. - let mimeType = collectASequenceOfCodePointsFast( - ',', - input, - position + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } ) - // 6. Strip leading and trailing ASCII whitespace - // from mimeType. - // Undici implementation note: we need to store the - // length because if the mimetype has spaces removed, - // the wrong amount will be sliced from the input in - // step #9 - const mimeTypeLength = mimeType.length - mimeType = removeASCIIWhitespace(mimeType, true, true) - - // 7. If position is past the end of input, then - // return failure - if (position.position >= input.length) { - return 'failure' - } - - // 8. Advance position by 1. - position.position++ - - // 9. Let encodedBody be the remainder of input. - const encodedBody = input.slice(mimeTypeLength + 1) - - // 10. Let body be the percent-decoding of encodedBody. - let body = stringPercentDecode(encodedBody) - - // 11. If mimeType ends with U+003B (;), followed by - // zero or more U+0020 SPACE, followed by an ASCII - // case-insensitive match for "base64", then: - if (/;(\u0020){0,}base64$/i.test(mimeType)) { - // 1. Let stringBody be the isomorphic decode of body. - const stringBody = isomorphicDecode(body) + // 12. 
Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') - // 2. Set body to the forgiving-base64 decode of - // stringBody. - body = forgivingBase64(stringBody) + // 13. Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: - // 3. If body is failure, then return failure. - if (body === 'failure') { - return 'failure' + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() } - // 4. Remove the last 6 code points from mimeType. - mimeType = mimeType.slice(0, -6) + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. - // 5. Remove trailing U+0020 SPACE code points from mimeType, - // if any. - mimeType = mimeType.replace(/(\u0020)+$/, '') + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. - // 6. Remove the last U+003B (;) code point from mimeType. - mimeType = mimeType.slice(0, -1) - } + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } - // 12. If mimeType starts with U+003B (;), then prepend - // "text/plain" to mimeType. - if (mimeType.startsWith(';')) { - mimeType = 'text/plain' + mimeType - } + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } - // 13. Let mimeTypeRecord be the result of parsing - // mimeType. - let mimeTypeRecord = parseMIMEType(mimeType) + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm - // 14. If mimeTypeRecord is failure, then set - // mimeTypeRecord to text/plain;charset=US-ASCII. - if (mimeTypeRecord === 'failure') { - mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + // 5. Resolve p with responseObject. + p.resolve(responseObject) } - // 15. Return a new data: URL struct whose MIME - // type is mimeTypeRecord and body is body. - // https://fetch.spec.whatwg.org/#data-url-struct - return { mimeType: mimeTypeRecord, body } + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) + + // 14. Return p. + return p.promise } -// https://url.spec.whatwg.org/#concept-url-serializer -/** - * @param {URL} url - * @param {boolean} excludeFragment - */ -function URLSerializer (url, excludeFragment = false) { - if (!excludeFragment) { - return url.href +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. 
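
// Step 4 of processResponse above attaches the network response to a Response
// whose headers carry the "immutable" guard, so callers can read but not mutate
// them. Sketch (Node 18+, top-level await in an ES module assumed):
const fetched = await fetch('https://example.com/')
console.log(fetched.headers.get('content-type')) // readable as usual
try {
  fetched.headers.set('x-mutated', '1') // guard is "immutable": this throws
} catch (err) {
  console.log(err instanceof TypeError) // true
}
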
+ if (response.type === 'error' && response.aborted) { + return } - const href = url.href - const hashLength = url.hash.length + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } - return hashLength === 0 ? href : href.substring(0, href.length - hashLength) -} + // 3. Let originalURL be response’s URL list[0]. + const originalURL = response.urlList[0] -// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points -/** - * @param {(char: string) => boolean} condition - * @param {string} input - * @param {{ position: number }} position - */ -function collectASequenceOfCodePoints (condition, input, position) { - // 1. Let result be the empty string. - let result = '' + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo - // 2. While position doesn’t point past the end of input and the - // code point at position within input meets the condition condition: - while (position.position < input.length && condition(input[position.position])) { - // 1. Append that code point to the end of result. - result += input[position.position] + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState - // 2. Advance position by 1. - position.position++ + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return } - // 3. Return result. - return result -} + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return + } -/** - * A faster collectASequenceOfCodePoints that only works when comparing a single character. - * @param {string} char - * @param {string} input - * @param {{ position: number }} position - */ -function collectASequenceOfCodePointsFast (char, input, position) { - const idx = input.indexOf(char, position.position) - const start = position.position + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) - if (idx === -1) { - position.position = input.length - return input.slice(start) + // 2. Set cacheState to the empty string. + cacheState = '' } - position.position = idx - return input.slice(start, position.position) -} + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() -// https://url.spec.whatwg.org/#string-percent-decode -/** @param {string} input */ -function stringPercentDecode (input) { - // 1. Let bytes be the UTF-8 encoding of input. - const bytes = encoder.encode(input) + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo - // 2. Return the percent-decoding of bytes. - return percentDecode(bytes) + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. + markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) } -// https://url.spec.whatwg.org/#percent-decode -/** @param {Uint8Array} input */ -function percentDecode (input) { - // 1. Let output be an empty byte sequence. - /** @type {number[]} */ - const output = [] - - // 2. 
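
// The mark-resource-timing hand-off above (step 11) is what makes completed
// fetches visible as "resource" performance entries on Node >= 18.2. A sketch
// of the observer side, assuming that Node version:
import { PerformanceObserver } from 'node:perf_hooks'

new PerformanceObserver(list => {
  for (const entry of list.getEntries()) {
    console.log(entry.name, Math.round(entry.duration)) // URL and elapsed ms
  }
}).observe({ entryTypes: ['resource'] })

fetch('https://example.com/').then(res => res.arrayBuffer()) // entry appears once the body is read
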
For each byte byte in input: - for (let i = 0; i < input.length; i++) { - const byte = input[i] - - // 1. If byte is not 0x25 (%), then append byte to output. - if (byte !== 0x25) { - output.push(byte) - - // 2. Otherwise, if byte is 0x25 (%) and the next two bytes - // after byte in input are not in the ranges - // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), - // and 0x61 (a) to 0x66 (f), all inclusive, append byte - // to output. - } else if ( - byte === 0x25 && - !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) - ) { - output.push(0x25) - - // 3. Otherwise: - } else { - // 1. Let bytePoint be the two bytes after byte in input, - // decoded, and then interpreted as hexadecimal number. - const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) - const bytePoint = Number.parseInt(nextTwoBytes, 16) - - // 2. Append a byte whose value is bytePoint to output. - output.push(bytePoint) - - // 3. Skip the next two bytes in input. - i += 2 - } +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) } - - // 3. Return output. - return Uint8Array.from(output) } -// https://mimesniff.spec.whatwg.org/#parse-a-mime-type -/** @param {string} input */ -function parseMIMEType (input) { - // 1. Remove any leading and trailing HTTP whitespace - // from input. - input = removeHTTPWhitespace(input, true, true) - - // 2. Let position be a position variable for input, - // initially pointing at the start of input. - const position = { position: 0 } +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } - // 3. Let type be the result of collecting a sequence - // of code points that are not U+002F (/) from - // input, given position. - const type = collectASequenceOfCodePointsFast( - '/', - input, - position - ) + // 1. Reject promise with error. + p.reject(error) - // 4. If type is the empty string or does not solely - // contain HTTP token code points, then return failure. - // https://mimesniff.spec.whatwg.org/#http-token-code-point - if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { - return 'failure' + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) } - // 5. If position is past the end of input, then return - // failure - if (position.position > input.length) { - return 'failure' + // 3. If responseObject is null, then return. + if (responseObject == null) { + return } - // 6. Advance position by 1. (This skips past U+002F (/).) - position.position++ - - // 7. Let subtype be the result of collecting a sequence of - // code points that are not U+003B (;) from input, given - // position. - let subtype = collectASequenceOfCodePointsFast( - ';', - input, - position - ) - - // 8. Remove any trailing HTTP whitespace from subtype. 
- subtype = removeHTTPWhitespace(subtype, false, true) + // 4. Let response be responseObject’s response. + const response = responseObject[kState] - // 9. If subtype is the empty string or does not solely - // contain HTTP token code points, then return failure. - if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { - return 'failure' + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) } +} - const typeLowercase = type.toLowerCase() - const subtypeLowercase = subtype.toLowerCase() +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null - // 10. Let mimeType be a new MIME type record whose type - // is type, in ASCII lowercase, and subtype is subtype, - // in ASCII lowercase. - // https://mimesniff.spec.whatwg.org/#mime-type - const mimeType = { - type: typeLowercase, - subtype: subtypeLowercase, - /** @type {Map} */ - parameters: new Map(), - // https://mimesniff.spec.whatwg.org/#mime-type-essence - essence: `${typeLowercase}/${subtypeLowercase}` - } + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false - // 11. While position is not past the end of input: - while (position.position < input.length) { - // 1. Advance position by 1. (This skips past U+003B (;).) - position.position++ + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject - // 2. Collect a sequence of code points that are HTTP - // whitespace from input given position. - collectASequenceOfCodePoints( - // https://fetch.spec.whatwg.org/#http-whitespace - char => HTTP_WHITESPACE_REGEX.test(char), - input, - position - ) + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } - // 3. Let parameterName be the result of collecting a - // sequence of code points that are not U+003B (;) - // or U+003D (=) from input, given position. - let parameterName = collectASequenceOfCodePoints( - (char) => char !== ';' && char !== '=', - input, - position - ) + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO - // 4. Set parameterName to parameterName, in ASCII - // lowercase. - parameterName = parameterName.toLowerCase() + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. + const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) - // 5. If position is not past the end of input, then: - if (position.position < input.length) { - // 1. If the code point at position within input is - // U+003B (;), then continue. - if (input[position.position] === ';') { - continue - } + // 6. 
Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. + const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } - // 2. Advance position by 1. (This skips past U+003D (=).) - position.position++ - } + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) - // 6. If position is past the end of input, then break. - if (position.position > input.length) { - break - } + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } - // 7. Let parameterValue be null. - let parameterValue = null + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } - // 8. If the code point at position within input is - // U+0022 ("), then: - if (input[position.position] === '"') { - // 1. Set parameterValue to the result of collecting - // an HTTP quoted string from input, given position - // and the extract-value flag. - parameterValue = collectAnHTTPQuotedString(input, position, true) + // 10. If all of the following conditions are true: + // TODO - // 2. Collect a sequence of code points that are not - // U+003B (;) from input, given position. - collectASequenceOfCodePointsFast( - ';', - input, - position + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer ) - - // 9. Otherwise: } else { - // 1. Set parameterValue to the result of collecting - // a sequence of code points that are not U+003B (;) - // from input, given position. - parameterValue = collectASequenceOfCodePointsFast( - ';', - input, - position - ) - - // 2. Remove any trailing HTTP whitespace from parameterValue. - parameterValue = removeHTTPWhitespace(parameterValue, false, true) - - // 3. If parameterValue is the empty string, then continue. - if (parameterValue.length === 0) { - continue - } - } - - // 10. 
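
// The dispatcher threaded through fetchParams above is undici-specific: fetch()
// uses init.dispatcher when one is given and falls back to the global dispatcher
// otherwise. A sketch assuming the standalone `undici` package rather than this
// bundled copy:
import { Agent, fetch as undiciFetch } from 'undici'

const agent = new Agent({ keepAliveTimeout: 10_000 })
const res = await undiciFetch('https://example.com/', { dispatcher: agent })
console.log(res.status)
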
If all of the following are true - // - parameterName is not the empty string - // - parameterName solely contains HTTP token code points - // - parameterValue solely contains HTTP quoted-string token code points - // - mimeType’s parameters[parameterName] does not exist - // then set mimeType’s parameters[parameterName] to parameterValue. - if ( - parameterName.length !== 0 && - HTTP_TOKEN_CODEPOINTS.test(parameterName) && - (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && - !mimeType.parameters.has(parameterName) - ) { - mimeType.parameters.set(parameterName, parameterValue) + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() } } - // 12. Return mimeType. - return mimeType -} + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' -// https://infra.spec.whatwg.org/#forgiving-base64-decode -/** @param {string} data */ -function forgivingBase64 (data) { - // 1. Remove all ASCII whitespace from data. - data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO - // 2. If data’s code point length divides by 4 leaving - // no remainder, then: - if (data.length % 4 === 0) { - // 1. If data ends with one or two U+003D (=) code points, - // then remove them from data. - data = data.replace(/=?=$/, '') + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) } - // 3. If data’s code point length divides by 4 leaving - // a remainder of 1, then return failure. - if (data.length % 4 === 1) { - return 'failure' + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') } - // 4. If data contains a code point that is not one of - // U+002B (+) - // U+002F (/) - // ASCII alphanumeric - // then return failure. - if (/[^+/0-9A-Za-z]/.test(data)) { - return 'failure' + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO } - const binary = atob(data) - const bytes = new Uint8Array(binary.length) - - for (let byte = 0; byte < binary.length; byte++) { - bytes[byte] = binary.charCodeAt(byte) + // 15. If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO } - return bytes + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) + + // 17. 
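
// Steps 12–13 above add `accept: */*` and `accept-language: *` only when the
// caller has not set those headers. A self-contained sketch with a throwaway
// local server (Node 18+ assumed):
import http from 'node:http'

const server = http.createServer((req, res) => {
  console.log(req.headers.accept)             // "*/*"
  console.log(req.headers['accept-language']) // "*"
  res.end('ok')
})

server.listen(0, async () => {
  await fetch(`http://localhost:${server.address().port}/`)
  server.close()
})
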
Return fetchParam's controller + return fetchParams.controller } -// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string -// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string -/** - * @param {string} input - * @param {{ position: number }} position - * @param {boolean?} extractValue - */ -function collectAnHTTPQuotedString (input, position, extractValue) { - // 1. Let positionStart be position. - const positionStart = position.position +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - // 2. Let value be the empty string. - let value = '' + // 2. Let response be null. + let response = null - // 3. Assert: the code point at position within input - // is U+0022 ("). - assert(input[position.position] === '"') + // 3. If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') + } - // 4. Advance position by 1. - position.position++ + // 4. Run report Content Security Policy violations for request. + // TODO - // 5. While true: - while (true) { - // 1. Append the result of collecting a sequence of code points - // that are not U+0022 (") or U+005C (\) from input, given - // position, to value. - value += collectASequenceOfCodePoints( - (char) => char !== '"' && char !== '\\', - input, - position - ) + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) - // 2. If position is past the end of input, then break. - if (position.position >= input.length) { - break - } + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') + } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? - // 3. Let quoteOrBackslash be the code point at position within - // input. - const quoteOrBackslash = input[position.position] + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy + } - // 4. Advance position by 1. - position.position++ + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) + } - // 5. If quoteOrBackslash is U+005C (\), then: - if (quoteOrBackslash === '\\') { - // 1. If position is past the end of input, then append - // U+005C (\) to value and break. - if (position.position >= input.length) { - value += '\\' - break - } + // 9. 
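
// Step 6 of mainFetch above turns requests to "bad ports" (the block list that
// the old constants module carried, e.g. 22/SSH) into network errors before any
// socket is opened. What a caller observes (example.com is never contacted):
fetch('http://example.com:22/')
  .catch(err => {
    console.log(err.message) // "fetch failed"
    console.log(err.cause)   // the underlying network error describing the blocked port
  })
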
Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO - // 2. Append the code point at position within input to value. - value += input[position.position] + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO - // 3. Advance position by 1. - position.position++ + // 11. If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) - // 6. Otherwise: - } else { - // 1. Assert: quoteOrBackslash is U+0022 ("). - assert(quoteOrBackslash === '"') + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' - // 2. Break. - break - } - } + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } - // 6. If the extract-value flag is set, then return value. - if (extractValue) { - return value - } + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } - // 7. Return the code points from positionStart to position, - // inclusive, within input. - return input.slice(positionStart, position.position) -} + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } -/** - * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type - */ -function serializeAMimeType (mimeType) { - assert(mimeType !== 'failure') - const { parameters, essence } = mimeType + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' - // 1. Let serialization be the concatenation of mimeType’s - // type, U+002F (/), and mimeType’s subtype. - let serialization = essence + // 3. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } - // 2. For each name → value of mimeType’s parameters: - for (let [name, value] of parameters.entries()) { - // 1. Append U+003B (;) to serialization. - serialization += ';' + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } - // 2. Append name to serialization. 
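
// The scheme dispatch above means only http(s):, data: and blob: URLs can
// succeed here; anything else falls through to a network error such as
// "URL scheme must be a HTTP(S) scheme". Sketch (the URL is never contacted):
fetch('ftp://example.com/file.txt')
  .catch(err => console.log(err.message)) // "fetch failed"
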
- serialization += name + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO - // 3. Append U+003D (=) to serialization. - serialization += '=' + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' - // 4. If value does not solely contain HTTP token code - // points or value is the empty string, then: - if (!HTTP_TOKEN_CODEPOINTS.test(value)) { - // 1. Precede each occurence of U+0022 (") or - // U+005C (\) in value with U+005C (\). - value = value.replace(/(\\|")/g, '\\$1') + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } - // 2. Prepend U+0022 (") to value. - value = '"' + value + // 12. If recursive is true, then return response. + if (recursive) { + return response + } - // 3. Append U+0022 (") to value. - value += '"' + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO } - // 5. Append value to serialization. - serialization += value + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } } - // 3. Return serialization. - return serialization -} + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse -/** - * @see https://fetch.spec.whatwg.org/#http-whitespace - * @param {string} char - */ -function isHTTPWhiteSpace (char) { - return char === '\r' || char === '\n' || char === '\t' || char === ' ' -} + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } -/** - * @see https://fetch.spec.whatwg.org/#http-whitespace - * @param {string} str - */ -function removeHTTPWhitespace (str, leading = true, trailing = true) { - let lead = 0 - let trail = str.length - 1 + // 16. 
If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } - if (leading) { - for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() } - if (trailing) { - for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true } - return str.slice(lead, trail + 1) -} + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) -/** - * @see https://infra.spec.whatwg.org/#ascii-whitespace - * @param {string} char - */ -function isASCIIWhitespace (char) { - return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' -} + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return + } -/** - * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace - */ -function removeASCIIWhitespace (str, leading = true, trailing = true) { - let lead = 0 - let trail = str.length - 1 + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } - if (leading) { - for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); - } + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] - if (trailing) { - for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); - } + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } - return str.slice(lead, trail + 1) + // 4. Fully read response’s body given processBody and processBodyError. 
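
// Step 20 above is what backs RequestInit.integrity: the body is read in full,
// hashed, and compared against the metadata before the caller sees a response.
// A sketch using a data: URL so the expected digest can be computed locally
// (Node 18+ assumed):
import { createHash } from 'node:crypto'

const digest = createHash('sha256').update('hello').digest('base64')
const verified = await fetch('data:,hello', { integrity: `sha256-${digest}` })
console.log(await verified.text()) // "hello" – digest matched

await fetch('data:,hello', { integrity: 'sha256-AAAA' })
  .catch(err => console.log(err.message)) // "fetch failed" – integrity mismatch
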
+ await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } } -module.exports = { - dataURLProcessor, - URLSerializer, - collectASequenceOfCodePoints, - collectASequenceOfCodePointsFast, - stringPercentDecode, - parseMIMEType, - collectAnHTTPQuotedString, - serializeAMimeType -} +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } + // 2. Let request be fetchParams’s request. + const { request } = fetchParams -/***/ }), + const { protocol: scheme } = requestCurrentURL(request) -/***/ 8511: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. -"use strict"; + // Otherwise, return a network error. + return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = (__nccwpck_require__(14300).resolveObjectURL) + } + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) -const { Blob, File: NativeFile } = __nccwpck_require__(4300) -const { types } = __nccwpck_require__(3837) -const { kState } = __nccwpck_require__(5861) -const { isBlobLike } = __nccwpck_require__(2538) -const { webidl } = __nccwpck_require__(1744) -const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -const { kEnumerableProperty } = __nccwpck_require__(3983) -const encoder = new TextEncoder() + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) + } -class File extends Blob { - constructor (fileBits, fileName, options = {}) { - // The File constructor is invoked with two or three parameters, depending - // on whether the optional dictionary parameter is used. When the File() - // constructor is invoked, user agents must run the following steps: - webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) - fileBits = webidl.converters['sequence'](fileBits) - fileName = webidl.converters.USVString(fileName) - options = webidl.converters.FilePropertyBag(options) + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. 
+ if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } - // 1. Let bytes be the result of processing blob parts given fileBits and - // options. - // Note: Blob handles this for us + // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) - // 2. Let n be the fileName argument to the constructor. - const n = fileName + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] - // 3. Process FilePropertyBag dictionary argument by running the following - // substeps: + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) - // 1. If the type member is provided and is not the empty string, let t - // be set to the type dictionary member. If t contains any characters - // outside the range U+0020 to U+007E, then set t to the empty string - // and return from these substeps. - // 2. Convert every character in t to ASCII lowercase. - let t = options.type - let d + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' - // eslint-disable-next-line no-labels - substep: { - if (t) { - t = parseMIMEType(t) + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) - if (t === 'failure') { - t = '' - // eslint-disable-next-line no-labels - break substep - } + response.body = body - t = serializeAMimeType(t).toLowerCase() + return Promise.resolve(response) + } + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) + + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) } - // 3. If the lastModified member is provided, let d be set to the - // lastModified dictionary member. If it is not provided, set d to the - // current date and time represented as the number of milliseconds since - // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). - d = options.lastModified - } + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) - // 4. Return a new File object F such that: - // F refers to the bytes byte sequence. - // F.size is set to the number of total bytes in bytes. - // F.name is set to n. - // F.type is set to t. - // F.lastModified is set to d. + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... 
yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. - super(processBlobParts(fileBits, options), { type: t }) - this[kState] = { - name: n, - lastModified: d, - type: t + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) } } +} - get name () { - webidl.brandCheck(this, File) +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true - return this[kState].name + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) } +} - get lastModified () { - webidl.brandCheck(this, File) +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] - return this[kState].lastModified + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) } - get type () { - webidl.brandCheck(this, File) + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true - return this[kState].type + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. + if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) + } } -} - -class FileLike { - constructor (blobLike, fileName, options = {}) { - // TODO: argument idl type check - // The File constructor is invoked with two or three parameters, depending - // on whether the optional dictionary parameter is used. When the File() - // constructor is invoked, user agents must run the following steps: + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) + } - // 1. Let bytes be the result of processing blob parts given fileBits and - // options. + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: - // 2. Let n be the fileName argument to the constructor. - const n = fileName + // 1. Let transformStream be a new a TransformStream. - // 3. Process FilePropertyBag dictionary argument by running the following - // substeps: + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) + } - // 1. 
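
// Usage sketch of the data:/blob: branches of schemeFetch above – both are
// resolved entirely in-process, with no network dispatch (Node 18+ globals assumed):
const dataRes = await fetch('data:text/plain;base64,aGVsbG8=')
console.log(dataRes.headers.get('content-type')) // "text/plain"
console.log(await dataRes.text())                // "hello"

const blobUrl = URL.createObjectURL(new Blob(['from a blob'], { type: 'text/plain' }))
console.log(await (await fetch(blobUrl)).text()) // "from a blob"
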
If the type member is provided and is not the empty string, let t - // be set to the type dictionary member. If t contains any characters - // outside the range U+0020 to U+007E, then set t to the empty string - // and return from these substeps. - // TODO - const t = options.type + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 + } + }, { + size () { + return 1 + } + }) - // 2. Convert every character in t to ASCII lowercase. - // TODO + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } + } - // 3. If the lastModified member is provided, let d be set to the - // lastModified dictionary member. If it is not provided, set d to the - // current date and time represented as the number of milliseconds since - // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). - const d = options.lastModified ?? Date.now() + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) - // 4. Return a new File object F such that: - // F refers to the bytes byte sequence. - // F.size is set to the number of total bytes in bytes. - // F.name is set to n. - // F.type is set to t. - // F.lastModified is set to d. + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) - this[kState] = { - blobLike, - name: n, - type: t, - lastModified: d + // 3. If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) } + return Promise.resolve() } +} - stream (...args) { - webidl.brandCheck(this, FileLike) +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - return this[kState].blobLike.stream(...args) - } + // 2. Let response be null. + let response = null - arrayBuffer (...args) { - webidl.brandCheck(this, FileLike) + // 3. Let actualResponse be null. + let actualResponse = null - return this[kState].blobLike.arrayBuffer(...args) + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 5. If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO } - slice (...args) { - webidl.brandCheck(this, FileLike) + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO - return this[kState].blobLike.slice(...args) - } + // 2. 
If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' + } - text (...args) { - webidl.brandCheck(this, FileLike) + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) - return this[kState].blobLike.text(...args) - } + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } - get size () { - webidl.brandCheck(this, FileLike) + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } - return this[kState].blobLike.size + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') } - get type () { - webidl.brandCheck(this, FileLike) + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } - return this[kState].blobLike.type + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) + } } - get name () { - webidl.brandCheck(this, FileLike) + // 9. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo - return this[kState].name - } + // 10. Return response. + return response +} - get lastModified () { - webidl.brandCheck(this, FileLike) +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - return this[kState].lastModified + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. 
+ const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response + } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) } - get [Symbol.toStringTag] () { - return 'File' + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) } -} -Object.defineProperties(File.prototype, { - [Symbol.toStringTag]: { - value: 'File', - configurable: true - }, - name: kEnumerableProperty, - lastModified: kEnumerableProperty -}) + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) + } -webidl.converters.Blob = webidl.interfaceConverter(Blob) + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 -webidl.converters.BlobPart = function (V, opts) { - if (webidl.util.Type(V) === 'Object') { - if (isBlobLike(V)) { - return webidl.converters.Blob(V, { strict: false }) - } + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) + } - if ( - ArrayBuffer.isView(V) || - types.isAnyArrayBuffer(V) - ) { - return webidl.converters.BufferSource(V, opts) - } + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) } - return webidl.converters.USVString(V, opts) -} + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) + } -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.BlobPart -) + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null -// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag -webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ - { - key: 'lastModified', - converter: webidl.converters['long long'], - get defaultValue () { - return Date.now() + // 2. 
For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) } - }, - { - key: 'type', - converter: webidl.converters.DOMString, - defaultValue: '' - }, - { - key: 'endings', - converter: (value) => { - value = webidl.converters.DOMString(value) - value = value.toLowerCase() - - if (value !== 'native') { - value = 'transparent' - } - - return value - }, - defaultValue: 'transparent' } -]) -/** - * @see https://www.w3.org/TR/FileAPI/#process-blob-parts - * @param {(NodeJS.TypedArray|Blob|string)[]} parts - * @param {{ type: string, endings: string }} options - */ -function processBlobParts (parts, options) { - // 1. Let bytes be an empty sequence of bytes. - /** @type {NodeJS.TypedArray[]} */ - const bytes = [] + // 13. If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. + if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') - // 2. For each element in parts: - for (const element of parts) { - // 1. If element is a USVString, run the following substeps: - if (typeof element === 'string') { - // 1. Let s be element. - let s = element + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) - // 2. If the endings member of options is "native", set s - // to the result of converting line endings to native - // of element. - if (options.endings === 'native') { - s = convertLineEndingsNative(s) - } + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') + } - // 3. Append the result of UTF-8 encoding s to bytes. - bytes.push(encoder.encode(s)) - } else if ( - types.isAnyArrayBuffer(element) || - types.isTypedArray(element) - ) { - // 2. If element is a BufferSource, get a copy of the - // bytes held by the buffer source, and append those - // bytes to bytes. - if (!element.buffer) { // ArrayBuffer - bytes.push(new Uint8Array(element)) - } else { - bytes.push( - new Uint8Array(element.buffer, element.byteOffset, element.byteLength) - ) - } - } else if (isBlobLike(element)) { - // 3. If element is a Blob, append the bytes it represents - // to bytes. - bytes.push(element) - } + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] } - // 3. Return bytes. - return bytes -} + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo -/** - * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native - * @param {string} s - */ -function convertLineEndingsNative (s) { - // 1. Let native line ending be be the code point U+000A LF. - let nativeLineEnding = '\n' + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) - // 2. 
If the underlying platform’s conventions are to - // represent newlines as a carriage return and line feed - // sequence, set native line ending to the code point - // U+000D CR followed by the code point U+000A LF. - if (process.platform === 'win32') { - nativeLineEnding = '\r\n' + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime } - return s.replace(/\r?\n/g, nativeLineEnding) -} + // 18. Append locationURL to request’s URL list. + request.urlList.push(locationURL) + + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) -// If this function is moved to ./util.js, some tools (such as -// rollup) will warn about circular dependencies. See: -// https://github.com/nodejs/undici/issues/1629 -function isFileLike (object) { - return ( - (NativeFile && object instanceof NativeFile) || - object instanceof File || ( - object && - (typeof object.stream === 'function' || - typeof object.arrayBuffer === 'function') && - object[Symbol.toStringTag] === 'File' - ) - ) + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) } -module.exports = { File, FileLike, isFileLike } +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + // 2. Let httpFetchParams be null. + let httpFetchParams = null -/***/ }), + // 3. Let httpRequest be null. + let httpRequest = null -/***/ 2015: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 4. Let response be null. + let response = null -"use strict"; + // 5. Let storedResponse be null. + // TODO: cache + // 6. Let httpCache be null. + const httpCache = null -const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2538) -const { kState } = __nccwpck_require__(5861) -const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(8511) -const { webidl } = __nccwpck_require__(1744) -const { Blob, File: NativeFile } = __nccwpck_require__(4300) + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false -/** @type {globalThis['File']} */ -const File = NativeFile ?? UndiciFile + // 8. Run these steps, but abort when the ongoing fetch is terminated: -// https://xhr.spec.whatwg.org/#formdata -class FormData { - constructor (form) { - if (form !== undefined) { - throw webidl.errors.conversionFailed({ - prefix: 'FormData constructor', - argument: 'Argument 1', - types: ['undefined'] - }) - } + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: - this[kState] = [] - } + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) - append (name, value, filename = undefined) { - webidl.brandCheck(this, FormData) + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } - webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + // 3. 
Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } - if (arguments.length === 3 && !isBlobLike(value)) { - throw new TypeError( - "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" - ) - } + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') - // 1. Let value be value if given; otherwise blobValue. + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null - name = webidl.converters.USVString(name) - value = isBlobLike(value) - ? webidl.converters.Blob(value, { strict: false }) - : webidl.converters.USVString(value) - filename = arguments.length === 3 - ? webidl.converters.USVString(filename) - : undefined + // 5. Let contentLengthHeaderValue be null. + let contentLengthHeaderValue = null - // 2. Let entry be the result of creating an entry with - // name, value, and filename if given. - const entry = makeEntry(name, value, filename) + // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' + } - // 3. Append entry to this’s entry list. - this[kState].push(entry) + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) } - delete (name) { - webidl.brandCheck(this, FormData) + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. - name = webidl.converters.USVString(name) + // 10. If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } - // The delete(name) method steps are to remove all entries whose name - // is name from this’s entry list. - this[kState] = this[kState].filter(entry => entry.name !== name) + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) } - get (name) { - webidl.brandCheck(this, FormData) + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) - name = webidl.converters.USVString(name) + // 14. 
If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') + } - // 1. If there is no entry whose name is name in this’s entry list, - // then return null. - const idx = this[kState].findIndex((entry) => entry.name === name) - if (idx === -1) { - return null - } + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". + if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' + } - // 2. Return the value of the first entry whose name is name from - // this’s entry list. - return this[kState][idx].value + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') } - getAll (name) { - webidl.brandCheck(this, FormData) + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + // 2. If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } + } - name = webidl.converters.USVString(name) + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') + } - // 1. If there is no entry whose name is name in this’s entry list, - // then return the empty list. - // 2. Return the values of all entries whose name is name, in order, - // from this’s entry list. - return this[kState] - .filter((entry) => entry.name === name) - .map((entry) => entry.value) + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. 
+ // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } } - has (name) { - webidl.brandCheck(this, FormData) + httpRequest.headersList.delete('host') - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } - name = webidl.converters.USVString(name) + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication - // The has(name) method steps are to return true if there is an entry - // whose name is name in this’s entry list; otherwise false. - return this[kState].findIndex((entry) => entry.name === name) !== -1 + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache + + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' } - set (name, value, filename = undefined) { - webidl.brandCheck(this, FormData) + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } - webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO - if (arguments.length === 3 && !isBlobLike(value)) { - throw new TypeError( - "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" - ) + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') } - // The set(name, value) and set(name, blobValue, filename) method steps - // are: + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) - // 1. Let value be value if given; otherwise blobValue. + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache + } - name = webidl.converters.USVString(name) - value = isBlobLike(value) - ? webidl.converters.Blob(value, { strict: false }) - : webidl.converters.USVString(value) - filename = arguments.length === 3 - ? toUSVString(filename) - : undefined + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } - // 2. 
Let entry be the result of creating an entry with name, value, and - // filename if given. - const entry = makeEntry(name, value, filename) + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse - // 3. If there are entries in this’s entry list whose name is name, then - // replace the first such entry with entry and remove the others. - const idx = this[kState].findIndex((entry) => entry.name === name) - if (idx !== -1) { - this[kState] = [ - ...this[kState].slice(0, idx), - entry, - ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) - ] - } else { - // 4. Otherwise, append entry to this’s entry list. - this[kState].push(entry) + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache } } - entries () { - webidl.brandCheck(this, FormData) + // 11. Set response’s URL list to a clone of httpRequest’s URL list. + response.urlList = [...httpRequest.urlList] - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'key+value' - ) + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true } - keys () { - webidl.brandCheck(this, FormData) + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'key' - ) - } + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO - values () { - webidl.brandCheck(this, FormData) + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() + } - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'value' - ) - } + // 2. ??? - /** - * @param {(value: string, key: string, self: FormData) => void} callbackFn - * @param {unknown} thisArg - */ - forEach (callbackFn, thisArg = globalThis) { - webidl.brandCheck(this, FormData) + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? - if (typeof callbackFn !== 'function') { - throw new TypeError( - "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." - ) - } + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } - for (const [key, value] of this) { - callbackFn.apply(thisArg, [value, key, this]) + // 16. 
If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: + + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) } + + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. + + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. + // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() + + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) + } + + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO } + + // 18. Return response. + return response } -FormData.prototype[Symbol.iterator] = FormData.prototype.entries +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) -Object.defineProperties(FormData.prototype, { - [Symbol.toStringTag]: { - value: 'FormData', - configurable: true + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } } -}) -/** - * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry - * @param {string} name - * @param {string|Blob} value - * @param {?string} filename - * @returns - */ -function makeEntry (name, value, filename) { - // 1. Set name to the result of converting name into a scalar value string. - // "To convert a string into a scalar value string, replace any surrogates - // with U+FFFD." - // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end - name = Buffer.from(name).toString('utf8') + // 1. Let request be fetchParams’s request. + const request = fetchParams.request - // 2. If value is a string, then set value to the result of converting - // value into a scalar value string. - if (typeof value === 'string') { - value = Buffer.from(value).toString('utf8') - } else { - // 3. Otherwise: + // 2. Let response be null. + let response = null - // 1. If value is not a File object, then set value to a new File object, - // representing the same bytes, whose name attribute value is "blob" - if (!isFileLike(value)) { - value = value instanceof Blob - ? new File([value], 'blob', { type: value.type }) - : new FileLike(value, 'blob', { type: value.type }) - } + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo - // 2. If filename is given, then set value to a new File object, - // representing the same bytes, whose name attribute is filename. - if (filename !== undefined) { - /** @type {FilePropertyBag} */ - const options = { - type: value.type, - lastModified: value.lastModified - } + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. 
+ // TODO: cache + const httpCache = null - value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile - ? new File([value], filename, options) - : new FileLike(value, filename, options) - } + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' } - // 4. Return an entry whose name is name and whose value is value. - return { name, value } -} + // 6. Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO -module.exports = { FormData } + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO + } -/***/ }), + // 9. Run these steps, but abort when the ongoing fetch is terminated: -/***/ 1246: -/***/ ((module) => { + // 1. If connection is failure, then return a network error. -"use strict"; + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. -// In case of breaking changes, increase the version -// number to avoid conflicts. -const globalOrigin = Symbol.for('undici.globalOrigin.1') + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. -function getGlobalOrigin () { - return globalThis[globalOrigin] -} + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: -function setGlobalOrigin (newOrigin) { - if (newOrigin === undefined) { - Object.defineProperty(globalThis, globalOrigin, { - value: undefined, - writable: true, - enumerable: false, - configurable: false - }) + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] - return - } + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. - const parsedURL = new URL(newOrigin) + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). 
- if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { - throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) - } + // - Wait until all the headers are transmitted. - Object.defineProperty(globalThis, globalOrigin, { - value: parsedURL, - writable: true, - enumerable: false, - configurable: false - }) -} + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. -module.exports = { - getGlobalOrigin, - setGlobalOrigin -} + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. + // - If the HTTP request results in a TLS client certificate dialog, then: -/***/ }), + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. -/***/ 554: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 2. Otherwise, return a network error. -"use strict"; -// https://github.com/Ethan-Arrowood/undici-fetch + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: + // 1. Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + // 2. Run this step in parallel: transmit bytes. + yield bytes -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const { kGuard } = __nccwpck_require__(5861) -const { kEnumerableProperty } = __nccwpck_require__(3983) -const { - makeIterator, - isValidHeaderName, - isValidHeaderValue -} = __nccwpck_require__(2538) -const { webidl } = __nccwpck_require__(1744) -const assert = __nccwpck_require__(9491) + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } -const kHeadersMap = Symbol('headers map') -const kHeadersSortedMap = Symbol('headers map sorted') + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } -/** - * @param {number} code - */ -function isHTTPWhiteSpaceCharCode (code) { - return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 -} + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() + } + } -/** - * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize - * @param {string} potentialValue - */ -function headerValueNormalize (potentialValue) { - // To normalize a byte sequence potentialValue, remove - // any leading and trailing HTTP whitespace bytes from - // potentialValue. - let i = 0; let j = potentialValue.length + // 3. 
Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } - while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j - while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. + if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) + } + } - return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) -} + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) + } + processEndOfBody() + } catch (err) { + processBodyError(err) + } + })() + } -function fill (headers, object) { - // To fill a Headers object headers with a given object object, run these steps: + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) - // 1. If object is a sequence, then for each header in object: - // Note: webidl conversion to array has already been done. - if (Array.isArray(object)) { - for (let i = 0; i < object.length; ++i) { - const header = object[i] - // 1. If header does not contain exactly two items, then throw a TypeError. - if (header.length !== 2) { - throw webidl.errors.exception({ - header: 'Headers constructor', - message: `expected name/value pair to be length 2, found ${header.length}.` - }) - } + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() - // 2. Append (header’s first item, header’s second item) to headers. - appendHeader(headers, header[0], header[1]) + response = makeResponse({ status, statusText, headersList }) } - } else if (typeof object === 'object' && object !== null) { - // Note: null should throw + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() - // 2. Otherwise, object is a record, then for each key → value in object, - // append (key, value) to headers - const keys = Object.keys(object) - for (let i = 0; i < keys.length; ++i) { - appendHeader(headers, keys[i], object[keys[i]]) + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) } - } else { - throw webidl.errors.conversionFailed({ - prefix: 'Headers constructor', - argument: 'Argument 1', - types: ['sequence>', 'record'] - }) + + return makeNetworkError(err) } -} -/** - * @see https://fetch.spec.whatwg.org/#concept-headers-append - */ -function appendHeader (headers, name, value) { - // 1. Normalize value. - value = headerValueNormalize(value) + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. 
- if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value, - type: 'header value' - }) + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) } - // 3. If headers’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if headers’s guard is "request" and name is a - // forbidden header name, return. - // Note: undici does not implement forbidden header names - if (headers[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (headers[kGuard] === 'request-no-cors') { - // 5. Otherwise, if headers’s guard is "request-no-cors": - // TODO + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(35356).ReadableStream) } - // 6. Otherwise, if headers’s guard is "response" and name is a - // forbidden response-header name, return. + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) + } + }, + { + highWaterMark: 0, + size () { + return 1 + } + } + ) - // 7. Append (name, value) to headers’s header list. - return headers[kHeadersList].append(name, value) + // 17. Run these steps, but abort when the ongoing fetch is terminated: - // 8. If headers’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from headers -} + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } -class HeadersList { - /** @type {[string, string][]|null} */ - cookies = null + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO - constructor (init) { - if (init instanceof HeadersList) { - this[kHeadersMap] = new Map(init[kHeadersMap]) - this[kHeadersSortedMap] = init[kHeadersSortedMap] - this.cookies = init.cookies === null ? null : [...init.cookies] - } else { - this[kHeadersMap] = new Map(init) - this[kHeadersSortedMap] = null - } - } + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO - // https://fetch.spec.whatwg.org/#header-list-contains - contains (name) { - // A header list list contains a header name name if list - // contains a header whose name is a byte-case-insensitive - // match for name. - name = name.toLowerCase() + // 18. 
If aborted, then: + // TODO - return this[kHeadersMap].has(name) - } + // 19. Run these steps in parallel: - clear () { - this[kHeadersMap].clear() - this[kHeadersSortedMap] = null - this.cookies = null - } + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... - // https://fetch.spec.whatwg.org/#concept-header-list-append - append (name, value) { - this[kHeadersSortedMap] = null + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() - // 1. If list contains name, then set name to the first such - // header’s name. - const lowercaseName = name.toLowerCase() - const exists = this[kHeadersMap].get(lowercaseName) + if (isAborted(fetchParams)) { + break + } - // 2. Append (name, value) to list. - if (exists) { - const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' - this[kHeadersMap].set(lowercaseName, { - name: exists.name, - value: `${exists.value}${delimiter}${value}` - }) - } else { - this[kHeadersMap].set(lowercaseName, { name, value }) - } + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined + } else { + bytes = err - if (lowercaseName === 'set-cookie') { - this.cookies ??= [] - this.cookies.push(value) + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true + } + } + + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) + + finalizeResponse(fetchParams, response) + + return + } + + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. + if (isFailure) { + fetchParams.controller.terminate(bytes) + return + } + + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return + } + + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return + } } } - // https://fetch.spec.whatwg.org/#concept-header-list-set - set (name, value) { - this[kHeadersSortedMap] = null - const lowercaseName = name.toLowerCase() + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true - if (lowercaseName === 'set-cookie') { - this.cookies = [value] + // 2. If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. 
+ if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } } - // 1. If list contains name, then set the value of - // the first such header to value and remove the - // others. - // 2. Otherwise, append header (name, value) to list. - this[kHeadersMap].set(lowercaseName, { name, value }) + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() } - // https://fetch.spec.whatwg.org/#concept-header-list-delete - delete (name) { - this[kHeadersSortedMap] = null + // 20. Return response. + return response - name = name.toLowerCase() + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher - if (name === 'set-cookie') { - this.cookies = null - } + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, - this[kHeadersMap].delete(name) - } + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller - // https://fetch.spec.whatwg.org/#concept-header-list-get - get (name) { - const value = this[kHeadersMap].get(name.toLowerCase()) + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, - // 1. If list does not contain name, then return null. - // 2. Return the values of all headers in list whose name - // is a byte-case-insensitive match for name, - // separated from each other by 0x2C 0x20, in order. - return value === undefined ? null : value.value - } + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } - * [Symbol.iterator] () { - // use the lowercased name - for (const [name, { value }] of this[kHeadersMap]) { - yield [name, value] - } - } + let codings = [] + let location = '' - get entries () { - const headers = {} + const headers = new Headers() - if (this[kHeadersMap].size) { - for (const { name, value } of this[kHeadersMap].values()) { - headers[name] = value - } - } + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." 
+ codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } - return headers - } -} + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } -// https://fetch.spec.whatwg.org/#headers-class -class Headers { - constructor (init = undefined) { - if (init === kConstruct) { - return - } - this[kHeadersList] = new HeadersList() + headers[kHeadersList].append(key, val) + } + } - // The new Headers(init) constructor steps are: + this.body = new Readable({ read: resume }) - // 1. Set this’s guard to "none". - this[kGuard] = 'none' + const decoders = [] - // 2. If init is given, then fill this with init. - if (init !== undefined) { - init = webidl.converters.HeadersInit(init) - fill(this, init) - } - } + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) - // https://fetch.spec.whatwg.org/#dom-headers-append - append (name, value) { - webidl.brandCheck(this, Headers) + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } - webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) - name = webidl.converters.ByteString(name) - value = webidl.converters.ByteString(value) + return true + }, - return appendHeader(this, name, value) - } + onData (chunk) { + if (fetchParams.controller.dump) { + return + } - // https://fetch.spec.whatwg.org/#dom-headers-delete - delete (name) { - webidl.brandCheck(this, Headers) + // 1. If one or more bytes have been transmitted from response’s + // message body, then: - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) + // 1. Let bytes be the transmitted bytes. + const bytes = chunk - name = webidl.converters.ByteString(name) + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.delete', - value: name, - type: 'header name' - }) - } + // 3. 
Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength - // 2. If this’s guard is "immutable", then throw a TypeError. - // 3. Otherwise, if this’s guard is "request" and name is a - // forbidden header name, return. - // 4. Otherwise, if this’s guard is "request-no-cors", name - // is not a no-CORS-safelisted request-header name, and - // name is not a privileged no-CORS request-header name, - // return. - // 5. Otherwise, if this’s guard is "response" and name is - // a forbidden response-header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // TODO - } + // 4. See pullAlgorithm... - // 6. If this’s header list does not contain name, then - // return. - if (!this[kHeadersList].contains(name)) { - return - } + return this.body.push(bytes) + }, - // 7. Delete name from this’s header list. - // 8. If this’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from this. - this[kHeadersList].delete(name) - } + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } - // https://fetch.spec.whatwg.org/#dom-headers-get - get (name) { - webidl.brandCheck(this, Headers) + fetchParams.controller.ended = true - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) + this.body.push(null) + }, - name = webidl.converters.ByteString(name) + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.get', - value: name, - type: 'header name' - }) - } + this.body?.destroy(error) - // 2. Return the result of getting name from this’s header - // list. - return this[kHeadersList].get(name) - } + fetchParams.controller.terminate(error) - // https://fetch.spec.whatwg.org/#dom-headers-has - has (name) { - webidl.brandCheck(this, Headers) + reject(error) + }, - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } - name = webidl.converters.ByteString(name) + const headers = new Headers() - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.has', - value: name, - type: 'header name' - }) - } + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') - // 2. Return true if this’s header list contains name; - // otherwise false. - return this[kHeadersList].contains(name) + headers[kHeadersList].append(key, val) + } + + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) + + return true + } + } + )) } +} - // https://fetch.spec.whatwg.org/#dom-headers-set - set (name, value) { - webidl.brandCheck(this, Headers) +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} - webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) - name = webidl.converters.ByteString(name) - value = webidl.converters.ByteString(value) +/***/ }), - // 1. Normalize value. 
- value = headerValueNormalize(value) +/***/ 48359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.set', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.set', - value, - type: 'header value' - }) - } +"use strict"; +/* globals AbortController */ - // 3. If this’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if this’s guard is "request" and name is a - // forbidden header name, return. - // 5. Otherwise, if this’s guard is "request-no-cors" and - // name/value is not a no-CORS-safelisted request-header, - // return. - // 6. Otherwise, if this’s guard is "response" and name is a - // forbidden response-header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // TODO - } - // 7. Set (name, value) in this’s header list. - // 8. If this’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from this - this[kHeadersList].set(name, value) - } - // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie - getSetCookie () { - webidl.brandCheck(this, Headers) +const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(41472) +const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(10554) +const { FinalizationRegistry } = __nccwpck_require__(56436)() +const util = __nccwpck_require__(83983) +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = __nccwpck_require__(52538) +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = __nccwpck_require__(41037) +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(15861) +const { webidl } = __nccwpck_require__(21744) +const { getGlobalOrigin } = __nccwpck_require__(71246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(72785) +const assert = __nccwpck_require__(39491) +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(82361) - // 1. If this’s header list does not contain `Set-Cookie`, then return « ». - // 2. Return the values of all headers in this’s header list whose name is - // a byte-case-insensitive match for `Set-Cookie`, in order. 
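The removed Headers methods above all run isValidHeaderName/isValidHeaderValue before touching the header list. A rough approximation of those guards, for orientation only (the exact regexes live in undici's utility module and are stricter than this sketch):

const TOKEN = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/ // RFC 9110 field-name token, approximate

function checkHeader (name, value) {
  if (!TOKEN.test(name)) {
    throw new TypeError(`Invalid header name: ${name}`)
  }
  // Field values may not contain NUL, CR or LF; undici also trims surrounding
  // whitespace first (the headerValueNormalize call removed above).
  if (/[\0\r\n]/.test(value)) {
    throw new TypeError(`Invalid value for header ${name}`)
  }
}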
+let TransformStream = globalThis.TransformStream - const list = this[kHeadersList].cookies +const kAbortController = Symbol('abortController') - if (list) { - return [...list] +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return } - return [] - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) - // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine - get [kHeadersSortedMap] () { - if (this[kHeadersList][kHeadersSortedMap]) { - return this[kHeadersList][kHeadersSortedMap] - } + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) - // 1. Let headers be an empty list of headers with the key being the name - // and value the value. - const headers = [] + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } + } - // 2. Let names be the result of convert header names to a sorted-lowercase - // set with all the names of the headers in list. - const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) - const cookies = this[kHeadersList].cookies + // 1. Let request be null. + let request = null - // 3. For each name of names: - for (let i = 0; i < names.length; ++i) { - const [name, value] = names[i] - // 1. If name is `set-cookie`, then: - if (name === 'set-cookie') { - // 1. Let values be a list of all values of headers in list whose name - // is a byte-case-insensitive match for name, in order. + // 2. Let fallbackMode be null. + let fallbackMode = null - // 2. For each value of values: - // 1. Append (name, value) to headers. - for (let j = 0; j < cookies.length; ++j) { - headers.push([name, cookies[j]]) - } - } else { - // 2. Otherwise: + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl - // 1. Let value be the result of getting name from list. + // 4. Let signal be null. + let signal = null - // 2. Assert: value is non-null. - assert(value !== null) + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. + // 2. If parsedURL is failure, then throw a TypeError. + let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) + } - // 3. Append (name, value) to headers. - headers.push([name, value]) + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) } - } - this[kHeadersList][kHeadersSortedMap] = headers + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) - // 4. Return headers. - return headers - } + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: - keys () { - webidl.brandCheck(this, Headers) + // 7. Assert: input is a Request object. 
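The getSetCookie() method and the sort-and-combine logic above exist because Set-Cookie is the one header that must never be joined into a single value. A small usage example against the standard Headers API shipped globally in recent Node releases:

const h = new Headers()
h.append('set-cookie', 'a=1; Path=/')
h.append('set-cookie', 'b=2; Path=/')
console.log(h.getSetCookie()) // ['a=1; Path=/', 'b=2; Path=/']
console.log([...h])           // iteration yields each set-cookie entry separately, per the code above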
+ assert(input instanceof Request) - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'key') + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] } - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'key' - ) - } + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin - values () { - webidl.brandCheck(this, Headers) + // 8. Let window be "client". + let window = 'client' - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'value') + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window } - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'value' - ) - } - - entries () { - webidl.brandCheck(this, Headers) + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'key+value') + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' } - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'key+value' - ) - } + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. 
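The constructor steps above distinguish a string input, which is parsed against the global base URL and rejected if it embeds credentials, from a Request input, whose underlying request state and signal are adopted. For example:

// String input: parsed, and credentials in the URL are refused.
try {
  new Request('https://user:pass@example.com/data') // throws
} catch (err) {
  console.log(err instanceof TypeError) // true
}

// Request input: the new object starts from the existing request's state.
const base = new Request('https://example.com/data', { method: 'POST', body: 'hi' })
const derived = new Request(base, { method: 'PUT' })
console.log(derived.method) // 'PUT'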
+ urlList: [...request.urlList] + }) - /** - * @param {(value: string, key: string, self: Headers) => void} callbackFn - * @param {unknown} thisArg - */ - forEach (callbackFn, thisArg = globalThis) { - webidl.brandCheck(this, Headers) + const initHasKey = Object.keys(init).length !== 0 - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) + // 13. If init is not empty, then: + if (initHasKey) { + // 1. If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' + } - if (typeof callbackFn !== 'function') { - throw new TypeError( - "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." - ) - } + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false - for (const [key, value] of this) { - callbackFn.apply(thisArg, [value, key, this]) - } - } + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false - [Symbol.for('nodejs.util.inspect.custom')] () { - webidl.brandCheck(this, Headers) + // 4. Set request’s origin to "client". + request.origin = 'client' - return this[kHeadersList] - } -} + // 5. Set request’s referrer to "client" + request.referrer = 'client' -Headers.prototype[Symbol.iterator] = Headers.prototype.entries + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' -Object.defineProperties(Headers.prototype, { - append: kEnumerableProperty, - delete: kEnumerableProperty, - get: kEnumerableProperty, - has: kEnumerableProperty, - set: kEnumerableProperty, - getSetCookie: kEnumerableProperty, - keys: kEnumerableProperty, - values: kEnumerableProperty, - entries: kEnumerableProperty, - forEach: kEnumerableProperty, - [Symbol.iterator]: { enumerable: false }, - [Symbol.toStringTag]: { - value: 'Headers', - configurable: true - } -}) + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] -webidl.converters.HeadersInit = function (V) { - if (webidl.util.Type(V) === 'Object') { - if (V[Symbol.iterator]) { - return webidl.converters['sequence>'](V) + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] } - return webidl.converters['record'](V) - } - - throw webidl.errors.conversionFailed({ - prefix: 'Headers constructor', - argument: 'Argument 1', - types: ['sequence>', 'record'] - }) -} + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer -module.exports = { - fill, - Headers, - HeadersList -} + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". 
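The referrer handling that follows maps an empty init.referrer to "no-referrer" and falls back to "client" when the parsed referrer is about:client or cross-origin relative to the request's environment. In plain Node, where no global origin is configured by default, that looks roughly like this (example URLs are illustrative):

console.log(new Request('https://example.com/', { referrer: '' }).referrer)
// '' — stored internally as "no-referrer", surfaced as the empty string

console.log(new Request('https://example.com/', { referrer: 'https://example.com/page' }).referrer)
// 'https://example.com/page' — the parsed referrer is kept when it is not rejected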
+ if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } + } -/***/ }), + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy + } -/***/ 4881: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } -"use strict"; -// https://github.com/Ethan-Arrowood/undici-fetch + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) + } + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode + } + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials + } -const { - Response, - makeNetworkError, - makeAppropriateNetworkError, - filterResponse, - makeResponse -} = __nccwpck_require__(7823) -const { Headers } = __nccwpck_require__(554) -const { Request, makeRequest } = __nccwpck_require__(8359) -const zlib = __nccwpck_require__(9796) -const { - bytesMatch, - makePolicyContainer, - clonePolicyContainer, - requestBadPort, - TAOCheck, - appendRequestOriginHeader, - responseLocationURL, - requestCurrentURL, - setRequestReferrerPolicyOnRedirect, - tryUpgradeRequestToAPotentiallyTrustworthyURL, - createOpaqueTimingInfo, - appendFetchMetadata, - corsCheck, - crossOriginResourcePolicyCheck, - determineRequestsReferrer, - coarsenedSharedCurrentTime, - createDeferredPromise, - isBlobLike, - sameOrigin, - isCancelled, - isAborted, - isErrorLike, - fullyReadBody, - readableStreamClose, - isomorphicEncode, - urlIsLocal, - urlIsHttpHttpsScheme, - urlHasHttpsScheme -} = __nccwpck_require__(2538) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const assert = __nccwpck_require__(9491) -const { safelyExtractBody } = __nccwpck_require__(9990) -const { - redirectStatusSet, - nullBodyStatus, - safeMethodsSet, - requestBodyHeader, - subresourceSet, - DOMException -} = __nccwpck_require__(1037) -const { kHeadersList } = __nccwpck_require__(2785) -const EE = __nccwpck_require__(2361) -const { Readable, pipeline } = __nccwpck_require__(2781) -const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3983) -const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(685) -const { TransformStream } = __nccwpck_require__(5356) -const { getGlobalDispatcher } = __nccwpck_require__(1892) -const { webidl } = __nccwpck_require__(1744) -const { STATUS_CODES } = __nccwpck_require__(3685) -const GET_OR_HEAD = ['GET', 'HEAD'] + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } -/** @type {import('buffer').resolveObjectURL} */ -let resolveObjectURL -let ReadableStream = globalThis.ReadableStream + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. 
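Two of the validation steps above are easy to trip over in practice: mode: 'navigate' is rejected outright, and cache: 'only-if-cached' is only allowed together with mode: 'same-origin'. For example:

new Request('https://example.com/', { mode: 'cors' })                                  // ok
// new Request('https://example.com/', { mode: 'navigate' })                           // TypeError
new Request('https://example.com/', { cache: 'only-if-cached', mode: 'same-origin' })  // ok
// new Request('https://example.com/', { cache: 'only-if-cached' })                    // TypeError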
+ if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } -class Fetch extends EE { - constructor (dispatcher) { - super() + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect + } - this.dispatcher = dispatcher - this.connection = null - this.dump = false - this.state = 'ongoing' - // 2 terminated listeners get added per request, - // but only 1 gets removed. If there are 20 redirects, - // 21 listeners will be added. - // See https://github.com/nodejs/undici/issues/1711 - // TODO (fix): Find and fix root cause for leaked listener. - this.setMaxListeners(21) - } + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. + if (init.integrity != null) { + request.integrity = String(init.integrity) + } - terminate (reason) { - if (this.state !== 'ongoing') { - return + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) } - this.state = 'terminated' - this.connection?.destroy(reason) - this.emit('terminated', reason) - } + // 25. If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method - // https://fetch.spec.whatwg.org/#fetch-controller-abort - abort (error) { - if (this.state !== 'ongoing') { - return - } + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } - // 1. Set controller’s state to "aborted". - this.state = 'aborted' + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) + } - // 2. Let fallbackError be an "AbortError" DOMException. - // 3. Set error to fallbackError if it is not given. - if (!error) { - error = new DOMException('The operation was aborted.', 'AbortError') - } + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) - // 4. Let serializedError be StructuredSerialize(error). - // If that threw an exception, catch it, and let - // serializedError be StructuredSerialize(fallbackError). + // 4. Set request’s method to method. + request.method = method + } - // 5. Set controller’s serialized abort reason to serializedError. - this.serializedAbortReason = error + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } - this.connection?.destroy(error) - this.emit('terminated', error) - } -} + // 27. Set this’s request to request. + this[kState] = request -// https://fetch.spec.whatwg.org/#fetch-method -function fetch (input, init = {}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. + // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] - // 1. Let p be a new promise. - const p = createDeferredPromise() + // 29. If signal is not null, then make this’s signal follow signal. 
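Making the new request's signal "follow" a caller-provided signal, as the next hunk does, needs some care: the internal AbortController must stay reachable for as long as the Request is (hence the strong kAbortController reference plus a FinalizationRegistry to detach the listener later), while the abort listener itself only holds a WeakRef so it cannot keep the controller alive on its own. A stripped-down sketch of that pattern, with illustrative names:

// Sketch only: follow `outer` with a fresh controller owned by the caller.
function followSignal (outer) {
  const ac = new AbortController()
  if (outer.aborted) {
    ac.abort(outer.reason)
  } else {
    const ref = new WeakRef(ac)
    const onAbort = function () {
      ref.deref()?.abort(this.reason) // `this` is the outer signal inside an EventTarget listener
    }
    outer.addEventListener('abort', onAbort, { once: true })
    // The shipped code additionally registers { signal: outer, abort: onAbort } in a
    // FinalizationRegistry so the listener is removed once the Request is collected.
  }
  return ac.signal
}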
+ if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) + } - // 2. Let requestObject be the result of invoking the initial value of - // Request as constructor with input and init as arguments. If this throws - // an exception, reject p with it and return p. - let requestObject + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac - try { - requestObject = new Request(input, init) - } catch (e) { - p.reject(e) - return p.promise - } + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) + } + } - // 3. Let request be requestObject’s request. - const request = requestObject[kState] + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} - // 4. If requestObject’s signal’s aborted flag is set, then: - if (requestObject.signal.aborted) { - // 1. Abort the fetch() call with p, request, null, and - // requestObject’s signal’s abort reason. - abortFetch(p, request, null, requestObject.signal.reason) + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) + } + } - // 2. Return p. - return p.promise - } + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] - // 5. Let globalObject be request’s client’s global object. - const globalObject = request.client.globalObject + // 31. If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } - // 6. If globalObject is a ServiceWorkerGlobalScope object, then set - // request’s service-workers mode to "none". - if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { - request.serviceWorkers = 'none' - } + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' + } - // 7. Let responseObject be null. - let responseObject = null + // 32. If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? 
init.headers : new HeadersList(headersList) - // 8. Let relevantRealm be this’s relevant Realm. - const relevantRealm = null + // 3. Empty this’s headers’s header list. + headersList.clear() - // 9. Let locallyAborted be false. - let locallyAborted = false + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) + } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) + } + } - // 10. Let controller be null. - let controller = null + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null - // 11. Add the following abort steps to requestObject’s signal: - addAbortListener( - requestObject.signal, - () => { - // 1. Set locallyAborted to true. - locallyAborted = true + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } - // 2. Assert: controller is non-null. - assert(controller != null) + // 35. Let initBody be null. + let initBody = null - // 3. Abort controller with requestObject’s signal’s abort reason. - controller.abort(requestObject.signal.reason) + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody - // 4. Abort the fetch() call with p, request, responseObject, - // and requestObject’s signal’s abort reason. - abortFetch(p, request, responseObject, requestObject.signal.reason) + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) + } } - ) - // 12. Let handleFetchDone given response response be to finalize and - // report timing with response, globalObject, and "fetch". - const handleFetchDone = (response) => - finalizeAndReportTiming(response, 'fetch') + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody - // 13. Set controller to the result of calling fetch given request, - // with processResponseEndOfBody set to handleFetchDone, and processResponse - // given response being these substeps: + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. + if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') + } - const processResponse = (response) => { - // 1. 
If locallyAborted is true, terminate these substeps. - if (locallyAborted) { - return Promise.resolve() + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) + } + + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true } - // 2. If response’s aborted flag is set, then: - if (response.aborted) { - // 1. Let deserializedError be the result of deserialize a serialized - // abort reason given controller’s serialized abort reason and - // relevantRealm. + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody - // 2. Abort the fetch() call with p, request, responseObject, and - // deserializedError. + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } - abortFetch(p, request, responseObject, controller.serializedAbortReason) - return Promise.resolve() + // 2. Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = (__nccwpck_require__(35356).TransformStream) + } + + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } } - // 3. If response is a network error, then reject p with a TypeError - // and terminate these substeps. - if (response.type === 'error') { - p.reject( - Object.assign(new TypeError('fetch failed'), { cause: response.error }) - ) - return Promise.resolve() - } + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } - // 4. Set responseObject to the result of creating a Response object, - // given response, "immutable", and relevantRealm. - responseObject = new Response() - responseObject[kState] = response - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kHeadersList] = response.headersList - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) - // 5. Resolve p with responseObject. - p.resolve(responseObject) + // The method getter steps are to return this’s request’s method. + return this[kState].method } - controller = fetching({ - request, - processResponseEndOfBody: handleFetchDone, - processResponse, - dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici - }) + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) - // 14. Return p. - return p.promise -} + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) + } -// https://fetch.spec.whatwg.org/#finalize-and-report-timing -function finalizeAndReportTiming (response, initiatorType = 'other') { - // 1. If response is an aborted network error, then return. 
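When a Request is built from another Request whose body is a stream, the code above "creates a proxy" for that body by piping it through an identity TransformStream: the original stream becomes locked, and the new request reads from the transform's readable side. A minimal sketch of that trick (the helper name is illustrative):

// Sketch: proxy a ReadableStream through an identity transform.
function proxyBody (readable) {
  const identity = new TransformStream() // no transform() given, so chunks pass through unchanged
  readable.pipeThrough(identity)
  return identity.readable // `readable` is now locked to the transform
}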
- if (response.type === 'error' && response.aborted) { - return + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. + get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] } - // 2. If response’s URL list is null or empty, then return. - if (!response.urlList?.length) { - return + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) + + // The destination getter are to return this’s request’s destination. + return this[kState].destination } - // 3. Let originalURL be response’s URL list[0]. - const originalURL = response.urlList[0] + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) - // 4. Let timingInfo be response’s timing info. - let timingInfo = response.timingInfo + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } - // 5. Let cacheState be response’s cache state. - let cacheState = response.cacheState + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } - // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. - if (!urlIsHttpHttpsScheme(originalURL)) { - return + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() } - // 7. If timingInfo is null, then return. - if (timingInfo === null) { - return + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) + + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy } - // 8. If response’s timing allow passed flag is not set, then: - if (!response.timingAllowPassed) { - // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. - timingInfo = createOpaqueTimingInfo({ - startTime: timingInfo.startTime - }) + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) - // 2. Set cacheState to the empty string. - cacheState = '' + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode } - // 9. Set timingInfo’s end time to the coarsened shared current time - // given global’s relevant settings object’s cross-origin isolated - // capability. - // TODO: given global’s relevant settings object’s cross-origin isolated - // capability? - timingInfo.endTime = coarsenedSharedCurrentTime() - - // 10. Set response’s timing info to timingInfo. 
- response.timingInfo = timingInfo + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. + return this[kState].credentials + } - // 11. Mark resource timing for timingInfo, originalURL, initiatorType, - // global, and cacheState. - markResourceTiming( - timingInfo, - originalURL, - initiatorType, - globalThis, - cacheState - ) -} + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. + get cache () { + webidl.brandCheck(this, Request) -// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing -function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { - if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { - performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache } -} -// https://fetch.spec.whatwg.org/#abort-fetch -function abortFetch (p, request, responseObject, error) { - // Note: AbortSignal.reason was added in node v17.2.0 - // which would give us an undefined error to reject with. - // Remove this once node v16 is no longer supported. - if (!error) { - error = new DOMException('The operation was aborted.', 'AbortError') + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. + get redirect () { + webidl.brandCheck(this, Request) + + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect } - // 1. Reject promise with error. - p.reject(error) + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) - // 2. If request’s body is not null and is readable, then cancel request’s - // body with error. - if (request.body != null && isReadable(request.body?.stream)) { - request.body.stream.cancel(error).catch((err) => { - if (err.code === 'ERR_INVALID_STATE') { - // Node bug? - return - } - throw err - }) + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity } - // 3. If responseObject is null, then return. - if (responseObject == null) { - return + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) + + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive } - // 4. Let response be responseObject’s response. - const response = responseObject[kState] + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) - // 5. If response’s body is not null and is readable, then error response’s - // body with error. 
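The removed abortFetch helper above rejects the pending promise with the abort reason and cancels whichever request/response body streams are still readable, swallowing Node's ERR_INVALID_STATE if a stream raced into an unusable state. From the caller's side that surfaces as the abort reason itself (the URL here is a placeholder):

const ac = new AbortController()
const pending = fetch('https://example.com/slow', { signal: ac.signal })
ac.abort(new Error('gave up waiting'))
pending.catch((err) => {
  console.log(err.message) // 'gave up waiting' — the abort reason, not a generic error
})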
- if (response.body != null && isReadable(response.body?.stream)) { - response.body.stream.cancel(error).catch((err) => { - if (err.code === 'ERR_INVALID_STATE') { - // Node bug? - return - } - throw err - }) + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation } -} -// https://fetch.spec.whatwg.org/#fetching -function fetching ({ - request, - processRequestBodyChunkLength, - processRequestEndOfBody, - processResponse, - processResponseEndOfBody, - processResponseConsumeBody, - useParallelQueue = false, - dispatcher // undici -}) { - // 1. Let taskDestination be null. - let taskDestination = null + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) - // 2. Let crossOriginIsolatedCapability be false. - let crossOriginIsolatedCapability = false + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } - // 3. If request’s client is non-null, then: - if (request.client != null) { - // 1. Set taskDestination to request’s client’s global object. - taskDestination = request.client.globalObject + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) - // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin - // isolated capability. - crossOriginIsolatedCapability = - request.client.crossOriginIsolatedCapability + // The signal getter steps are to return this’s signal. + return this[kSignal] } - // 4. If useParallelQueue is true, then set taskDestination to the result of - // starting a new parallel queue. - // TODO - - // 5. Let timingInfo be a new fetch timing info whose start time and - // post-redirect start time are the coarsened shared current time given - // crossOriginIsolatedCapability. - const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) - const timingInfo = createOpaqueTimingInfo({ - startTime: currenTime - }) + get body () { + webidl.brandCheck(this, Request) - // 6. Let fetchParams be a new fetch params whose - // request is request, - // timing info is timingInfo, - // process request body chunk length is processRequestBodyChunkLength, - // process request end-of-body is processRequestEndOfBody, - // process response is processResponse, - // process response consume body is processResponseConsumeBody, - // process response end-of-body is processResponseEndOfBody, - // task destination is taskDestination, - // and cross-origin isolated capability is crossOriginIsolatedCapability. - const fetchParams = { - controller: new Fetch(dispatcher), - request, - timingInfo, - processRequestBodyChunkLength, - processRequestEndOfBody, - processResponse, - processResponseConsumeBody, - processResponseEndOfBody, - taskDestination, - crossOriginIsolatedCapability + return this[kState].body ? this[kState].body.stream : null } - // 7. If request’s body is a byte sequence, then set request’s body to - // request’s body as a body. - // NOTE: Since fetching is only called from fetch, body should already be - // extracted. 
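The body and bodyUsed getters above are thin views over the internal body state: body exposes the underlying stream (or null when there is no body), and bodyUsed reports whether that stream has been disturbed. For instance:

const r = new Request('https://example.com/', { method: 'POST', body: 'x' })
console.log(r.body !== null) // true — a ReadableStream over the extracted body
console.log(r.bodyUsed)      // false
r.text().then(() => {
  console.log(r.bodyUsed)    // true once the body has been read
})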
- assert(!request.body || request.body.stream) + get bodyUsed () { + webidl.brandCheck(this, Request) - // 8. If request’s window is "client", then set request’s window to request’s - // client, if request’s client’s global object is a Window object; otherwise - // "no-window". - if (request.window === 'client') { - // TODO: What if request.client is null? - request.window = - request.client?.globalObject?.constructor?.name === 'Window' - ? request.client - : 'no-window' + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } - // 9. If request’s origin is "client", then set request’s origin to request’s - // client’s origin. - if (request.origin === 'client') { - // TODO: What if request.client is null? - request.origin = request.client?.origin + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' } - // 10. If all of the following conditions are true: - // TODO + // Returns a clone of request. + clone () { + webidl.brandCheck(this, Request) - // 11. If request’s policy container is "client", then: - if (request.policyContainer === 'client') { - // 1. If request’s client is non-null, then set request’s policy - // container to a clone of request’s client’s policy container. [HTML] - if (request.client != null) { - request.policyContainer = clonePolicyContainer( - request.client.policyContainer - ) - } else { - // 2. Otherwise, set request’s policy container to a new policy - // container. - request.policyContainer = makePolicyContainer() + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') } - } - // 12. If request’s header list does not contain `Accept`, then: - if (!request.headersList.contains('accept')) { - // 1. Let value be `*/*`. - const value = '*/*' + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) - // 2. A user agent should set value to the first matching statement, if - // any, switching on request’s destination: - // "document" - // "frame" - // "iframe" - // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` - // "image" - // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` - // "style" - // `text/css,*/*;q=0.1` - // TODO + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. + const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] - // 3. Append `Accept`/value to request’s header list. - request.headersList.append('accept', value) - } + // 4. Make clonedRequestObject’s signal follow this’s signal. + const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) + } else { + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) + } + clonedRequestObject[kSignal] = ac.signal - // 13. If request’s header list does not contain `Accept-Language`, then - // user agents should append `Accept-Language`/an appropriate value to - // request’s header list. - if (!request.headersList.contains('accept-language')) { - request.headersList.append('accept-language', '*') + // 4. 
Return clonedRequestObject. + return clonedRequestObject } +} - // 14. If request’s priority is null, then use request’s initiator and - // destination appropriately in setting request’s priority to a - // user-agent-defined object. - if (request.priority === null) { - // TODO - } +mixinBody(Request) - // 15. If request is a subresource request, then: - if (subresourceSet.has(request.destination)) { - // TODO +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() } - - // 16. Run main fetch given fetchParams. - mainFetch(fetchParams) - .catch(err => { - fetchParams.controller.terminate(err) - }) - - // 17. Return fetchParam's controller - return fetchParams.controller + request.url = request.urlList[0] + return request } -// https://fetch.spec.whatwg.org/#concept-main-fetch -async function mainFetch (fetchParams, recursive = false) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: - // 2. Let response be null. - let response = null + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) - // 3. If request’s local-URLs-only flag is set and request’s current URL is - // not local, then set response to a network error. - if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { - response = makeNetworkError('local URLs only') + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) } - // 4. Run report Content Security Policy violations for request. - // TODO - - // 5. Upgrade request to a potentially trustworthy URL, if appropriate. - tryUpgradeRequestToAPotentiallyTrustworthyURL(request) + // 3. Return newRequest. + return newRequest +} - // 6. If should request be blocked due to a bad port, should fetching request - // be blocked as mixed content, or should request be blocked by Content - // Security Policy returns blocked, then set response to a network error. 
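cloneRequest/clone() above duplicate the request including its body (via cloneBody), and clone() additionally wires up a fresh signal that follows the original. A quick usage note: cloning must happen before the body is consumed.

const original = new Request('https://example.com/', { method: 'POST', body: 'payload' })
const copy = original.clone()
copy.text().then((t) => console.log(t))     // 'payload'
original.text().then((t) => console.log(t)) // 'payload' — each copy carries the body
// Calling original.clone() after those reads would throw a TypeError ('unusable'), since bodyUsed is true.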
- if (requestBadPort(request) === 'blocked') { - response = makeNetworkError('bad port') +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true } - // TODO: should fetching request be blocked as mixed content? - // TODO: should request be blocked by Content Security Policy? +}) - // 7. If request’s referrer policy is the empty string, then set request’s - // referrer policy to request’s policy container’s referrer policy. - if (request.referrerPolicy === '') { - request.referrerPolicy = request.policyContainer.referrerPolicy - } +webidl.converters.Request = webidl.interfaceConverter( + Request +) - // 8. If request’s referrer is not "no-referrer", then set request’s - // referrer to the result of invoking determine request’s referrer. - if (request.referrer !== 'no-referrer') { - request.referrer = determineRequestsReferrer(request) +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) } - // 9. Set request’s current URL’s scheme to "https" if all of the following - // conditions are true: - // - request’s current URL’s scheme is "http" - // - request’s current URL’s host is a domain - // - Matching request’s current URL’s host per Known HSTS Host Domain Name - // Matching results in either a superdomain match with an asserted - // includeSubDomains directive or a congruent match (with or without an - // asserted includeSubDomains directive). [HSTS] - // TODO + if (V instanceof Request) { + return webidl.converters.Request(V) + } - // 10. If recursive is false, then run the remaining steps in parallel. - // TODO + return webidl.converters.USVString(V) +} - // 11. If response is null, then set response to the result of running - // the steps corresponding to the first matching statement: - if (response === null) { - response = await (async () => { - const currentURL = requestCurrentURL(request) +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) - if ( - // - request’s current URL’s origin is same origin with request’s origin, - // and request’s response tainting is "basic" - (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || - // request’s current URL’s scheme is "data" - (currentURL.protocol === 'data:') || - // - request’s mode is "navigate" or "websocket" - (request.mode === 'navigate' || request.mode === 'websocket') - ) { - // 1. Set request’s response tainting to "basic". 
- request.responseTainting = 'basic' +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex + } +]) - // 2. Return the result of running scheme fetch given fetchParams. - return await schemeFetch(fetchParams) - } +module.exports = { Request, makeRequest } - // request’s mode is "same-origin" - if (request.mode === 'same-origin') { - // 1. Return a network error. - return makeNetworkError('request mode cannot be "same-origin"') - } - // request’s mode is "no-cors" - if (request.mode === 'no-cors') { - // 1. If request’s redirect mode is not "follow", then return a network - // error. - if (request.redirect !== 'follow') { - return makeNetworkError( - 'redirect mode cannot be "follow" for "no-cors" request' - ) - } +/***/ }), - // 2. Set request’s response tainting to "opaque". - request.responseTainting = 'opaque' +/***/ 27823: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 3. Return the result of running scheme fetch given fetchParams. - return await schemeFetch(fetchParams) - } +"use strict"; - // request’s current URL’s scheme is not an HTTP(S) scheme - if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { - // Return a network error. - return makeNetworkError('URL scheme must be a HTTP(S) scheme') - } - // - request’s use-CORS-preflight flag is set - // - request’s unsafe-request flag is set and either request’s method is - // not a CORS-safelisted method or CORS-unsafe request-header names with - // request’s header list is not empty - // 1. Set request’s response tainting to "cors". - // 2. Let corsWithPreflightResponse be the result of running HTTP fetch - // given fetchParams and true. - // 3. If corsWithPreflightResponse is a network error, then clear cache - // entries using request. - // 4. Return corsWithPreflightResponse. 
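The RequestInit dictionary converter above validates every enumerated member against its allowed-values list (referrerPolicy, mode, credentials, cache, redirect, duplex), so a misspelled option fails fast instead of being silently ignored:

new Request('https://example.com/', { redirect: 'manual' })       // ok
// new Request('https://example.com/', { redirect: 'sometimes' }) // TypeError from the converter
// new Request('https://example.com/', { credentials: 'maybe' })  // TypeError as well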
- // TODO +const { Headers, HeadersList, fill } = __nccwpck_require__(10554) +const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(41472) +const util = __nccwpck_require__(83983) +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = __nccwpck_require__(52538) +const { + redirectStatusSet, + nullBodyStatus, + DOMException +} = __nccwpck_require__(41037) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(15861) +const { webidl } = __nccwpck_require__(21744) +const { FormData } = __nccwpck_require__(72015) +const { getGlobalOrigin } = __nccwpck_require__(71246) +const { URLSerializer } = __nccwpck_require__(685) +const { kHeadersList, kConstruct } = __nccwpck_require__(72785) +const assert = __nccwpck_require__(39491) +const { types } = __nccwpck_require__(73837) - // Otherwise - // 1. Set request’s response tainting to "cors". - request.responseTainting = 'cors' +const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(35356).ReadableStream) +const textEncoder = new TextEncoder('utf-8') - // 2. Return the result of running HTTP fetch given fetchParams. - return await httpFetch(fetchParams) - })() - } +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } - // 12. If recursive is true, then return response. - if (recursive) { - return response + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject } - // 13. If response is not a network error and response is not a filtered - // response, then: - if (response.status !== 0 && !response.internalResponse) { - // If request’s response tainting is "cors", then: - if (request.responseTainting === 'cors') { - // 1. Let headerNames be the result of extracting header list values - // given `Access-Control-Expose-Headers` and response’s header list. - // TODO - // 2. If request’s credentials mode is not "include" and headerNames - // contains `*`, then set response’s CORS-exposed header-name list to - // all unique header names in response’s header list. - // TODO - // 3. Otherwise, if headerNames is not null or failure, then set - // response’s CORS-exposed header-name list to headerNames. - // TODO - } + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) - // Set response to the following filtered response with response as its - // internal response, depending on request’s response tainting: - if (request.responseTainting === 'basic') { - response = filterResponse(response, 'basic') - } else if (request.responseTainting === 'cors') { - response = filterResponse(response, 'cors') - } else if (request.responseTainting === 'opaque') { - response = filterResponse(response, 'opaque') - } else { - assert(false) + if (init !== null) { + init = webidl.converters.ResponseInit(init) } - } - - // 14. 
Let internalResponse be response, if response is a network error, - // and response’s internal response otherwise. - let internalResponse = - response.status === 0 ? response : response.internalResponse - // 15. If internalResponse’s URL list is empty, then set it to a clone of - // request’s URL list. - if (internalResponse.urlList.length === 0) { - internalResponse.urlList.push(...request.urlList) - } + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) - // 16. If request’s timing allow failed flag is unset, then set - // internalResponse’s timing allow passed flag. - if (!request.timingAllowFailed) { - response.timingAllowPassed = true - } + // 2. Let body be the result of extracting bytes. + const body = extractBody(bytes) - // 17. If response is not a network error and any of the following returns - // blocked - // - should internalResponse to request be blocked as mixed content - // - should internalResponse to request be blocked by Content Security Policy - // - should internalResponse to request be blocked due to its MIME type - // - should internalResponse to request be blocked due to nosniff - // TODO + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm - // 18. If response’s type is "opaque", internalResponse’s status is 206, - // internalResponse’s range-requested flag is set, and request’s header - // list does not contain `Range`, then set response and internalResponse - // to a network error. - if ( - response.type === 'opaque' && - internalResponse.status === 206 && - internalResponse.rangeRequested && - !request.headers.contains('range') - ) { - response = internalResponse = makeNetworkError() - } + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) - // 19. If response is not a network error and either request’s method is - // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, - // set internalResponse’s body to null and disregard any enqueuing toward - // it (if any). - if ( - response.status !== 0 && - (request.method === 'HEAD' || - request.method === 'CONNECT' || - nullBodyStatus.includes(internalResponse.status)) - ) { - internalResponse.body = null - fetchParams.controller.dump = true + // 5. Return responseObject. + return responseObject } - // 20. If request’s integrity metadata is not the empty string, then: - if (request.integrity) { - // 1. Let processBodyError be this step: run fetch finale given fetchParams - // and a network error. - const processBodyError = (reason) => - fetchFinale(fetchParams, makeNetworkError(reason)) + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } - // 2. If request’s response tainting is "opaque", or response’s body is null, - // then run processBodyError and abort these steps. 
- if (request.responseTainting === 'opaque' || response.body == null) { - processBodyError(response.error) - return - } + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) - // 3. Let processBody given bytes be these steps: - const processBody = (bytes) => { - // 1. If bytes do not match request’s integrity metadata, - // then run processBodyError and abort these steps. [SRI] - if (!bytesMatch(bytes, request.integrity)) { - processBodyError('integrity mismatch') - return - } + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) - // 2. Set response’s body to bytes as a body. - response.body = safelyExtractBody(bytes)[0] + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) + } - // 3. Run fetch finale given fetchParams and response. - fetchFinale(fetchParams, response) + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) } - // 4. Fully read response’s body given processBody and processBodyError. - await fullyReadBody(response.body, processBody, processBodyError) - } else { - // 21. Otherwise, run fetch finale given fetchParams and response. - fetchFinale(fetchParams, response) - } -} + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm -// https://fetch.spec.whatwg.org/#concept-scheme-fetch -// given a fetch params fetchParams -function schemeFetch (fetchParams) { - // Note: since the connection is destroyed on redirect, which sets fetchParams to a - // cancelled state, we do not want this condition to trigger *unless* there have been - // no redirects. See https://github.com/nodejs/undici/issues/1776 - // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { - return Promise.resolve(makeAppropriateNetworkError(fetchParams)) - } + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status - // 2. Let request be fetchParams’s request. - const { request } = fetchParams + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) - const { protocol: scheme } = requestCurrentURL(request) + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) - // 3. Switch on request’s current URL’s scheme and run the associated steps: - switch (scheme) { - case 'about:': { - // If request’s current URL’s path is the string "blank", then return a new response - // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », - // and body is the empty byte sequence as a body. + // 8. Return responseObject. + return responseObject + } - // Otherwise, return a network error. 
- return Promise.resolve(makeNetworkError('about scheme is not supported')) + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) } - case 'blob:': { - if (!resolveObjectURL) { - resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL) - } - - // 1. Let blobURLEntry be request’s current URL’s blob URL entry. - const blobURLEntry = requestCurrentURL(request) - - // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 - // Buffer.resolveObjectURL does not ignore URL queries. - if (blobURLEntry.search.length !== 0) { - return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) - } - - const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) - // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s - // object is not a Blob object, then return a network error. - if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { - return Promise.resolve(makeNetworkError('invalid method')) - } + init = webidl.converters.ResponseInit(init) - // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. - const bodyWithType = safelyExtractBody(blobURLEntryObject) + // TODO + this[kRealm] = { settingsObject: {} } - // 4. Let body be bodyWithType’s body. - const body = bodyWithType[0] + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) - // 5. Let length be body’s length, serialized and isomorphic encoded. - const length = isomorphicEncode(`${body.length}`) + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] - // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. - const type = bodyWithType[1] ?? '' + // 3. Let bodyWithType be null. + let bodyWithType = null - // 7. Return a new response whose status message is `OK`, header list is - // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. - const response = makeResponse({ - statusText: 'OK', - headersList: [ - ['content-length', { name: 'Content-Length', value: length }], - ['content-type', { name: 'Content-Type', value: type }] - ] - }) + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } + } - response.body = body + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) + } - return Promise.resolve(response) - } - case 'data:': { - // 1. Let dataURLStruct be the result of running the - // data: URL processor on request’s current URL. - const currentURL = requestCurrentURL(request) - const dataURLStruct = dataURLProcessor(currentURL) + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) - // 2. If dataURLStruct is failure, then return a - // network error. - if (dataURLStruct === 'failure') { - return Promise.resolve(makeNetworkError('failed to fetch the data URL')) - } + // The type getter steps are to return this’s response’s type. 
+ return this[kState].type + } - // 3. Let mimeType be dataURLStruct’s MIME type, serialized. - const mimeType = serializeAMimeType(dataURLStruct.mimeType) + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) - // 4. Return a response whose status message is `OK`, - // header list is « (`Content-Type`, mimeType) », - // and body is dataURLStruct’s body as a body. - return Promise.resolve(makeResponse({ - statusText: 'OK', - headersList: [ - ['content-type', { name: 'Content-Type', value: mimeType }] - ], - body: safelyExtractBody(dataURLStruct.body)[0] - })) - } - case 'file:': { - // For now, unfortunate as it is, file URLs are left as an exercise for the reader. - // When in doubt, return a network error. - return Promise.resolve(makeNetworkError('not implemented... yet...')) - } - case 'http:': - case 'https:': { - // Return the result of running HTTP fetch given fetchParams. + const urlList = this[kState].urlList - return httpFetch(fetchParams) - .catch((err) => makeNetworkError(err)) - } - default: { - return Promise.resolve(makeNetworkError('unknown scheme')) + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' } + + return URLSerializer(url, true) } -} -// https://fetch.spec.whatwg.org/#finalize-response -function finalizeResponse (fetchParams, response) { - // 1. Set fetchParams’s request’s done flag. - fetchParams.request.done = true + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) - // 2, If fetchParams’s process response done is not null, then queue a fetch - // task to run fetchParams’s process response done given response, with - // fetchParams’s task destination. - if (fetchParams.processResponseDone != null) { - queueMicrotask(() => fetchParams.processResponseDone(response)) + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 } -} -// https://fetch.spec.whatwg.org/#fetch-finale -function fetchFinale (fetchParams, response) { - // 1. If response is a network error, then: - if (response.type === 'error') { - // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». - response.urlList = [fetchParams.request.urlList[0]] + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) - // 2. Set response’s timing info to the result of creating an opaque timing - // info for fetchParams’s timing info. - response.timingInfo = createOpaqueTimingInfo({ - startTime: fetchParams.timingInfo.startTime - }) + // The status getter steps are to return this’s response’s status. + return this[kState].status } - // 2. Let processResponseEndOfBody be the following steps: - const processResponseEndOfBody = () => { - // 1. Set fetchParams’s request’s done flag. - fetchParams.request.done = true + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) - // If fetchParams’s process response end-of-body is not null, - // then queue a fetch task to run fetchParams’s process response - // end-of-body given response with fetchParams’s task destination. 
- if (fetchParams.processResponseEndOfBody != null) { - queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) - } + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 } - // 3. If fetchParams’s process response is non-null, then queue a fetch task - // to run fetchParams’s process response given response, with fetchParams’s - // task destination. - if (fetchParams.processResponse != null) { - queueMicrotask(() => fetchParams.processResponse(response)) - } + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) - // 4. If response’s body is null, then run processResponseEndOfBody. - if (response.body == null) { - processResponseEndOfBody() - } else { - // 5. Otherwise: + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } - // 1. Let transformStream be a new a TransformStream. + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) - // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, - // enqueues chunk in transformStream. - const identityTransformAlgorithm = (chunk, controller) => { - controller.enqueue(chunk) - } + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } - // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm - // and flushAlgorithm set to processResponseEndOfBody. - const transformStream = new TransformStream({ - start () {}, - transform: identityTransformAlgorithm, - flush: processResponseEndOfBody - }, { - size () { - return 1 - } - }, { - size () { - return 1 - } - }) + get body () { + webidl.brandCheck(this, Response) - // 4. Set response’s body to the result of piping response’s body through transformStream. - response.body = { stream: response.body.stream.pipeThrough(transformStream) } + return this[kState].body ? this[kState].body.stream : null } - // 6. If fetchParams’s process response consume body is non-null, then: - if (fetchParams.processResponseConsumeBody != null) { - // 1. Let processBody given nullOrBytes be this step: run fetchParams’s - // process response consume body given response and nullOrBytes. - const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) - - // 2. Let processBodyError be this step: run fetchParams’s process - // response consume body given response and failure. - const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + get bodyUsed () { + webidl.brandCheck(this, Response) - // 3. If response’s body is null, then queue a fetch task to run processBody - // given null, with fetchParams’s task destination. - if (response.body == null) { - queueMicrotask(() => processBody(null)) - } else { - // 4. Otherwise, fully read response’s body given processBody, processBodyError, - // and fetchParams’s task destination. - return fullyReadBody(response.body, processBody, processBodyError) - } - return Promise.resolve() + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } -} -// https://fetch.spec.whatwg.org/#http-fetch -async function httpFetch (fetchParams) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) - // 2. Let response be null. 
- let response = null + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } - // 3. Let actualResponse be null. - let actualResponse = null + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) - // 4. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] - // 5. If request’s service-workers mode is "all", then: - if (request.serviceWorkers === 'all') { - // TODO + return clonedResponseObject } +} - // 6. If response is null, then: - if (response === null) { - // 1. If makeCORSPreflight is true and one of these conditions is true: - // TODO +mixinBody(Response) - // 2. If request’s redirect mode is "follow", then set request’s - // service-workers mode to "none". - if (request.redirect === 'follow') { - request.serviceWorkers = 'none' - } +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) - // 3. Set response and actualResponse to the result of running - // HTTP-network-or-cache fetch given fetchParams. - actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) - // 4. If request’s response tainting is "cors" and a CORS check - // for request and response returns failure, then return a network error. - if ( - request.responseTainting === 'cors' && - corsCheck(request, response) === 'failure' - ) { - return makeNetworkError('cors failure') - } +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: - // 5. If the TAO check for request and response returns failure, then set - // request’s timing allow failed flag. - if (TAOCheck(request, response) === 'failure') { - request.timingAllowFailed = true - } + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) } - // 7. If either request’s response tainting or response’s type - // is "opaque", and the cross-origin resource policy check with - // request’s origin, request’s client, request’s destination, - // and actualResponse returns blocked, then return a network error. 
- if ( - (request.responseTainting === 'opaque' || response.type === 'opaque') && - crossOriginResourcePolicyCheck( - request.origin, - request.client, - request.destination, - actualResponse - ) === 'blocked' - ) { - return makeNetworkError('blocked') + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) + + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) } - // 8. If actualResponse’s status is a redirect status, then: - if (redirectStatusSet.has(actualResponse.status)) { - // 1. If actualResponse’s status is not 303, request’s body is not null, - // and the connection uses HTTP/2, then user agents may, and are even - // encouraged to, transmit an RST_STREAM frame. - // See, https://github.com/whatwg/fetch/issues/1288 - if (request.redirect !== 'manual') { - fetchParams.controller.connection.destroy() - } + // 4. Return newResponse. + return newResponse +} - // 2. Switch on request’s redirect mode: - if (request.redirect === 'error') { - // Set response to a network error. - response = makeNetworkError('unexpected redirect') - } else if (request.redirect === 'manual') { - // Set response to an opaque-redirect filtered response whose internal - // response is actualResponse. - // NOTE(spec): On the web this would return an `opaqueredirect` response, - // but that doesn't make sense server side. - // See https://github.com/nodejs/undici/issues/1193. - response = actualResponse - } else if (request.redirect === 'follow') { - // Set response to the result of running HTTP-redirect fetch given - // fetchParams and response. - response = await httpRedirectFetch(fetchParams, response) - } else { - assert(false) - } +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] } +} - // 9. Set response’s timing info to timingInfo. - response.timingInfo = timingInfo +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} - // 10. Return response. - return response +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } + + return new Proxy(response, { + get (target, p) { + return p in state ? state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) } -// https://fetch.spec.whatwg.org/#http-redirect-fetch -function httpRedirectFetch (fetchParams, response) { - // 1. Let request be fetchParams’s request. 
- const request = fetchParams.request +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. - // 2. Let actualResponse be response, if response is not a filtered response, - // and response’s internal response otherwise. - const actualResponse = response.internalResponse - ? response.internalResponse - : response + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. - // 3. Let locationURL be actualResponse’s location URL given request’s current - // URL’s fragment. - let locationURL + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. - try { - locationURL = responseLocationURL( - actualResponse, - requestCurrentURL(request).hash - ) + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. - // 4. If locationURL is null, then return response. - if (locationURL == null) { - return response - } - } catch (err) { - // 5. If locationURL is failure, then return a network error. - return Promise.resolve(makeNetworkError(err)) + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) } +} - // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network - // error. - if (!urlIsHttpHttpsScheme(locationURL)) { - return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) - } +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) - // 7. If request’s redirect count is 20, then return a network error. - if (request.redirectCount === 20) { - return Promise.resolve(makeNetworkError('redirect count exceeded')) + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? 
makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) +} + +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') } - // 8. Increase request’s redirect count by 1. - request.redirectCount += 1 + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. + if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') + } + } - // 9. If request’s mode is "cors", locationURL includes credentials, and - // request’s origin is not same origin with locationURL’s origin, then return - // a network error. - if ( - request.mode === 'cors' && - (locationURL.username || locationURL.password) && - !sameOrigin(request, locationURL) - ) { - return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) + // 3. Set response’s response’s status to init["status"]. + if ('status' in init && init.status != null) { + response[kState].status = init.status } - // 10. If request’s response tainting is "cors" and locationURL includes - // credentials, then return a network error. - if ( - request.responseTainting === 'cors' && - (locationURL.username || locationURL.password) - ) { - return Promise.resolve(makeNetworkError( - 'URL cannot contain credentials for request mode "cors"' - )) + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText } - // 11. If actualResponse’s status is not 303, request’s body is non-null, - // and request’s body’s source is null, then return a network error. - if ( - actualResponse.status !== 303 && - request.body != null && - request.body.source == null - ) { - return Promise.resolve(makeNetworkError()) + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) } - // 12. If one of the following is true - // - actualResponse’s status is 301 or 302 and request’s method is `POST` - // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` - if ( - ([301, 302].includes(actualResponse.status) && request.method === 'POST') || - (actualResponse.status === 303 && - !GET_OR_HEAD.includes(request.method)) - ) { - // then: - // 1. Set request’s method to `GET` and request’s body to null. - request.method = 'GET' - request.body = null + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. + if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } - // 2. For each headerName of request-body-header name, delete headerName from - // request’s header list. 
- for (const headerName of requestBodyHeader) { - request.headersList.delete(headerName) + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) } } +} + +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) + +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } - // 13. If request’s current URL’s origin is not same origin with locationURL’s - // origin, then for each headerName of CORS non-wildcard request-header name, - // delete headerName from request’s header list. - if (!sameOrigin(requestCurrentURL(request), locationURL)) { - // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name - request.headersList.delete('authorization') + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } - // https://fetch.spec.whatwg.org/#authentication-entries - request.headersList.delete('proxy-authorization', true) + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } - // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. - request.headersList.delete('cookie') - request.headersList.delete('host') + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) } - // 14. If request’s body is non-null, then set request’s body to the first return - // value of safely extracting request’s body’s source. - if (request.body != null) { - assert(request.body.source != null) - request.body = safelyExtractBody(request.body.source)[0] + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) } - // 15. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo + return webidl.converters.DOMString(V) +} - // 16. Set timingInfo’s redirect end time and post-redirect start time to the - // coarsened shared current time given fetchParams’s cross-origin isolated - // capability. - timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = - coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) + } - // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s - // redirect start time to timingInfo’s start time. - if (timingInfo.redirectStartTime === 0) { - timingInfo.redirectStartTime = timingInfo.startTime + // Note: the spec doesn't include async iterables, + // this is an undici extension. + if (V?.[Symbol.asyncIterator]) { + return V } - // 18. Append locationURL to request’s URL list. - request.urlList.push(locationURL) + return webidl.converters.XMLHttpRequestBodyInit(V) +} - // 19. Invoke set request’s referrer policy on redirect on request and - // actualResponse. 
- setRequestReferrerPolicyOnRedirect(request, actualResponse) +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) - // 20. Return the result of running main fetch given fetchParams and true. - return mainFetch(fetchParams, true) +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse } -// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch -async function httpNetworkOrCacheFetch ( - fetchParams, - isAuthenticationFetch = false, - isNewConnectionFetch = false -) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request - // 2. Let httpFetchParams be null. - let httpFetchParams = null +/***/ }), - // 3. Let httpRequest be null. - let httpRequest = null +/***/ 15861: +/***/ ((module) => { - // 4. Let response be null. - let response = null +"use strict"; - // 5. Let storedResponse be null. - // TODO: cache - // 6. Let httpCache be null. - const httpCache = null +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} - // 7. Let the revalidatingFlag be unset. - const revalidatingFlag = false - // 8. Run these steps, but abort when the ongoing fetch is terminated: +/***/ }), - // 1. If request’s window is "no-window" and request’s redirect mode is - // "error", then set httpFetchParams to fetchParams and httpRequest to - // request. - if (request.window === 'no-window' && request.redirect === 'error') { - httpFetchParams = fetchParams - httpRequest = request - } else { - // Otherwise: +/***/ 52538: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 1. Set httpRequest to a clone of request. - httpRequest = makeRequest(request) +"use strict"; - // 2. Set httpFetchParams to a copy of fetchParams. - httpFetchParams = { ...fetchParams } - // 3. Set httpFetchParams’s request to httpRequest. - httpFetchParams.request = httpRequest - } +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(41037) +const { getGlobalOrigin } = __nccwpck_require__(71246) +const { performance } = __nccwpck_require__(4074) +const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(83983) +const assert = __nccwpck_require__(39491) +const { isUint8Array } = __nccwpck_require__(29830) - // 3. Let includeCredentials be true if one of - const includeCredentials = - request.credentials === 'include' || - (request.credentials === 'same-origin' && - request.responseTainting === 'basic') +let supportedHashes = [] - // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s - // body is non-null; otherwise null. - const contentLength = httpRequest.body ? httpRequest.body.length : null +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto - // 5. Let contentLengthHeaderValue be null. 
- let contentLengthHeaderValue = null +try { + crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ +} catch { +} - // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or - // `PUT`, then set contentLengthHeaderValue to `0`. - if ( - httpRequest.body == null && - ['POST', 'PUT'].includes(httpRequest.method) - ) { - contentLengthHeaderValue = '0' +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null } - // 7. If contentLength is non-null, then set contentLengthHeaderValue to - // contentLength, serialized and isomorphic encoded. - if (contentLength != null) { - contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. + let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) } - // 8. If contentLengthHeaderValue is non-null, then append - // `Content-Length`/contentLengthHeaderValue to httpRequest’s header - // list. - if (contentLengthHeaderValue != null) { - httpRequest.headersList.append('content-length', contentLengthHeaderValue) + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment } - // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, - // contentLengthHeaderValue) to httpRequest’s header list. + // 5. Return location. + return location +} - // 10. If contentLength is non-null and httpRequest’s keepalive is true, - // then: - if (contentLength != null && httpRequest.keepalive) { - // NOTE: keepalive is a noop outside of browser context. - } +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} - // 11. If httpRequest’s referrer is a URL, then append - // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, - // to httpRequest’s header list. - if (httpRequest.referrer instanceof URL) { - httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' } - // 12. Append a request `Origin` header for httpRequest. - appendRequestOriginHeader(httpRequest) + // 3. Return allowed. + return 'allowed' +} - // 13. Append the Fetch metadata headers for httpRequest. 
[FETCH-METADATA] - appendFetchMetadata(httpRequest) +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} - // 14. If httpRequest’s header list does not contain `User-Agent`, then - // user agents should append `User-Agent`/default `User-Agent` value to - // httpRequest’s header list. - if (!httpRequest.headersList.contains('user-agent')) { - httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. +// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } } + return true +} - // 15. If httpRequest’s cache mode is "default" and httpRequest’s header - // list contains `If-Modified-Since`, `If-None-Match`, - // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set - // httpRequest’s cache mode to "no-store". +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } + } + return true +} + +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. if ( - httpRequest.cache === 'default' && - (httpRequest.headersList.contains('if-modified-since') || - httpRequest.headersList.contains('if-none-match') || - httpRequest.headersList.contains('if-unmodified-since') || - httpRequest.headersList.contains('if-match') || - httpRequest.headersList.contains('if-range')) + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') ) { - httpRequest.cache = 'no-store' + return false } - // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent - // no-cache cache-control header modification flag is unset, and - // httpRequest’s header list does not contain `Cache-Control`, then append - // `Cache-Control`/`max-age=0` to httpRequest’s header list. 
if ( - httpRequest.cache === 'no-cache' && - !httpRequest.preventNoCacheCacheControlHeaderModification && - !httpRequest.headersList.contains('cache-control') + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') ) { - httpRequest.headersList.append('cache-control', 'max-age=0') + return false } - // 17. If httpRequest’s cache mode is "no-store" or "reload", then: - if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { - // 1. If httpRequest’s header list does not contain `Pragma`, then append - // `Pragma`/`no-cache` to httpRequest’s header list. - if (!httpRequest.headersList.contains('pragma')) { - httpRequest.headersList.append('pragma', 'no-cache') - } + return true +} - // 2. If httpRequest’s header list does not contain `Cache-Control`, - // then append `Cache-Control`/`no-cache` to httpRequest’s header list. - if (!httpRequest.headersList.contains('cache-control')) { - httpRequest.headersList.append('cache-control', 'no-cache') +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } } } - // 18. If httpRequest’s header list contains `Range`, then append - // `Accept-Encoding`/`identity` to httpRequest’s header list. - if (httpRequest.headersList.contains('range')) { - httpRequest.headersList.append('accept-encoding', 'identity') + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy } +} - // 19. Modify httpRequest’s header list per HTTP. Do not append a given - // header if httpRequest’s header list contains that header’s name. 
- // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 - if (!httpRequest.headersList.contains('accept-encoding')) { - if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { - httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') - } else { - httpRequest.headersList.append('accept-encoding', 'gzip, deflate') +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' +} + +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. + request.headersList.append('origin', serializedOrigin) } } +} - httpRequest.headersList.delete('host') +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} - // 20. If includeCredentials is true, then: - if (includeCredentials) { - // 1. 
If the user agent is not configured to block cookies for httpRequest - // (see section 7 of [COOKIES]), then: - // TODO: credentials - // 2. If httpRequest’s header list does not contain `Authorization`, then: - // TODO: credentials +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null } +} - // 21. If there’s a proxy-authentication entry, use it as appropriate. - // TODO: proxy-authentication - - // 22. Set httpCache to the result of determining the HTTP cache - // partition, given httpRequest. - // TODO: cache - - // 23. If httpCache is null, then set httpRequest’s cache mode to - // "no-store". - if (httpCache == null) { - httpRequest.cache = 'no-store' +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' } +} - // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", - // then: - if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { - // TODO: cache +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy } +} - // 9. If aborted, then return the appropriate network error for fetchParams. - // TODO +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. + const policy = request.referrerPolicy - // 10. If response is null, then: - if (response == null) { - // 1. If httpRequest’s cache mode is "only-if-cached", then return a - // network error. - if (httpRequest.mode === 'only-if-cached') { - return makeNetworkError('only if cached') - } + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) - // 2. Let forwardResponse be the result of running HTTP-network fetch - // given httpFetchParams, includeCredentials, and isNewConnectionFetch. - const forwardResponse = await httpNetworkFetch( - httpFetchParams, - includeCredentials, - isNewConnectionFetch - ) + // 2. Let environment be request’s client. - // 3. If httpRequest’s method is unsafe and forwardResponse’s status is - // in the range 200 to 399, inclusive, invalidate appropriate stored - // responses in httpCache, as per the "Invalidation" chapter of HTTP - // Caching, and set storedResponse to null. [HTTP-CACHING] - if ( - !safeMethodsSet.has(httpRequest.method) && - forwardResponse.status >= 200 && - forwardResponse.status <= 399 - ) { - // TODO: cache - } + let referrerSource = null - // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, - // then: - if (revalidatingFlag && forwardResponse.status === 304) { - // TODO: cache - } + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. - // 5. If response is null, then: - if (response == null) { - // 1. 
Set response to forwardResponse. - response = forwardResponse + const globalOrigin = getGlobalOrigin() - // 2. Store httpRequest and forwardResponse in httpCache, as per the - // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] - // TODO: cache + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' } + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer } - // 11. Set response’s URL list to a clone of httpRequest’s URL list. - response.urlList = [...httpRequest.urlList] + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) - // 12. If httpRequest’s header list contains `Range`, then set response’s - // range-requested flag. - if (httpRequest.headersList.contains('range')) { - response.rangeRequested = true + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin } - // 13. Set response’s request-includes-credentials to includeCredentials. - response.requestIncludesCredentials = includeCredentials + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) - // 14. If response’s status is 401, httpRequest’s response tainting is not - // "cors", includeCredentials is true, and request’s window is an environment - // settings object, then: - // TODO + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) - // 15. If response’s status is 407, then: - if (response.status === 407) { - // 1. If request’s window is "no-window", then return a network error. - if (request.window === 'no-window') { - return makeNetworkError() - } + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL + } - // 2. ??? + // 2. If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } - // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams)) { - return makeAppropriateNetworkError(fetchParams) + // 3. Return referrerOrigin. + return referrerOrigin } + case 'strict-origin': // eslint-disable-line + /** + * 1. 
If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ - // 4. Prompt the end user as appropriate in request’s window and store - // the result as a proxy-authentication entry. [HTTP-AUTH] - // TODO: Invoke some kind of callback? + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + } +} - // 5. Set response to the result of running HTTP-network-or-cache fetch given - // fetchParams. - // TODO - return makeNetworkError('proxy authentication required') +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' } - // 16. If all of the following are true - if ( - // response’s status is 421 - response.status === 421 && - // isNewConnectionFetch is false - !isNewConnectionFetch && - // request’s body is null, or request’s body is non-null and request’s body’s source is non-null - (request.body == null || request.body.source != null) - ) { - // then: + // 3. Set url’s username to the empty string. + url.username = '' - // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams)) { - return makeAppropriateNetworkError(fetchParams) - } + // 4. Set url’s password to the empty string. + url.password = '' - // 2. Set response to the result of running HTTP-network-or-cache - // fetch given fetchParams, isAuthenticationFetch, and true. + // 5. Set url’s fragment to null. + url.hash = '' - // TODO (spec): The spec doesn't specify this but we need to cancel - // the active response before we can start a new one. - // https://github.com/whatwg/fetch/issues/1293 - fetchParams.controller.connection.destroy() + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' - response = await httpNetworkOrCacheFetch( - fetchParams, - isAuthenticationFetch, - true - ) + // 2. Set url’s query to null. + url.search = '' } - // 17. If isAuthenticationFetch is true, then create an authentication entry - if (isAuthenticationFetch) { - // TODO + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false } - // 18. Return response. 
- return response -} + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true + } -// https://fetch.spec.whatwg.org/#http-network-fetch -async function httpNetworkFetch ( - fetchParams, - includeCredentials = false, - forceNewConnection = false -) { - assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + // If scheme is data, return true + if (url.protocol === 'data:') return true - fetchParams.controller.connection = { - abort: null, - destroyed: false, - destroy (err) { - if (!this.destroyed) { - this.destroyed = true - this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) - } + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true } - } - // 1. Let request be fetchParams’s request. - const request = fetchParams.request + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } - // 2. Let response be null. - let response = null + // If any other, return false + return false + } +} - // 3. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true + } - // 4. Let httpCache be the result of determining the HTTP cache partition, - // given request. - // TODO: cache - const httpCache = null + // 1. Let parsedMetadata be the result of parsing metadataList. + const parsedMetadata = parseMetadata(metadataList) - // 5. If httpCache is null, then set request’s cache mode to "no-store". - if (httpCache == null) { - request.cache = 'no-store' + // 2. If parsedMetadata is no metadata, return true. + if (parsedMetadata === 'no metadata') { + return true } - // 6. Let networkPartitionKey be the result of determining the network - // partition key given request. + // 3. If response is not eligible for integrity validation, return false. // TODO - // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise - // "no". - const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars - - // 8. Switch on request’s mode: - if (request.mode === 'websocket') { - // Let connection be the result of obtaining a WebSocket connection, - // given request’s current URL. - // TODO - } else { - // Let connection be the result of obtaining a connection, given - // networkPartitionKey, request’s current URL’s origin, - // includeCredentials, and forceNewConnection. - // TODO + // 4. 
If parsedMetadata is the empty set, return true. + if (parsedMetadata.length === 0) { + return true + } - // 9. Run these steps, but abort when the ongoing fetch is terminated: + // 5. Let metadata be the result of getting the strongest + // metadata from parsedMetadata. + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 1. If connection is failure, then return a network error. + // 6. For each item in metadata: + for (const item of metadata) { + // 1. Let algorithm be the alg component of item. + const algorithm = item.algo - // 2. Set timingInfo’s final connection timing info to the result of - // calling clamp and coarsen connection timing info with connection’s - // timing info, timingInfo’s post-redirect start time, and fetchParams’s - // cross-origin isolated capability. + // 2. Let expectedValue be the val component of item. + const expectedValue = item.hash - // 3. If connection is not an HTTP/2 connection, request’s body is non-null, - // and request’s body’s source is null, then append (`Transfer-Encoding`, - // `chunked`) to request’s header list. + // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e + // "be liberal with padding". This is annoying, and it's not even in the spec. - // 4. Set timingInfo’s final network-request start time to the coarsened - // shared current time given fetchParams’s cross-origin isolated - // capability. + // 3. Let actualValue be the result of applying algorithm to bytes. + let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - // 5. Set response to the result of making an HTTP request over connection - // using request with the following caveats: + if (actualValue[actualValue.length - 1] === '=') { + if (actualValue[actualValue.length - 2] === '=') { + actualValue = actualValue.slice(0, -2) + } else { + actualValue = actualValue.slice(0, -1) + } + } - // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] - // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] + // 4. If actualValue is a case-sensitive match for expectedValue, + // return true. + if (compareBase64Mixed(actualValue, expectedValue)) { + return true + } + } - // - If request’s body is non-null, and request’s body’s source is null, - // then the user agent may have a buffer of up to 64 kibibytes and store - // a part of request’s body in that buffer. If the user agent reads from - // request’s body beyond that buffer’s size and the user agent needs to - // resend request, then instead return a network error. + // 7. Return false. + return false +} - // - Set timingInfo’s final network-response start time to the coarsened - // shared current time given fetchParams’s cross-origin isolated capability, - // immediately after the user agent’s HTTP parser receives the first byte - // of the response (e.g., frame header bytes for HTTP/2 or response status - // line for HTTP/1.x). +// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options +// https://www.w3.org/TR/CSP2/#source-list-syntax +// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 +const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i - // - Wait until all the headers are transmitted. +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + * @param {string} metadata + */ +function parseMetadata (metadata) { + // 1. Let result be the empty set.
+ /** @type {{ algo: string, hash: string }[]} */ + const result = [] - // - Any responses whose status is in the range 100 to 199, inclusive, - // and is not 101, are to be ignored, except for the purposes of setting - // timingInfo’s final network-response start time above. + // 2. Let empty be equal to true. + let empty = true - // - If request’s header list contains `Transfer-Encoding`/`chunked` and - // response is transferred via HTTP/1.0 or older, then return a network - // error. + // 3. For each token returned by splitting metadata on spaces: + for (const token of metadata.split(' ')) { + // 1. Set empty to false. + empty = false - // - If the HTTP request results in a TLS client certificate dialog, then: + // 2. Parse token as a hash-with-options. + const parsedToken = parseHashWithOptions.exec(token) - // 1. If request’s window is an environment settings object, make the - // dialog available in request’s window. + // 3. If token does not parse, continue to the next token. + if ( + parsedToken === null || + parsedToken.groups === undefined || + parsedToken.groups.algo === undefined + ) { + // Note: Chromium blocks the request at this point, but Firefox + // gives a warning that an invalid integrity was given. The + // correct behavior is to ignore these, and subsequently not + // check the integrity of the resource. + continue + } - // 2. Otherwise, return a network error. + // 4. Let algorithm be the hash-algo component of token. + const algorithm = parsedToken.groups.algo.toLowerCase() - // To transmit request’s body body, run these steps: - let requestBody = null - // 1. If body is null and fetchParams’s process request end-of-body is - // non-null, then queue a fetch task given fetchParams’s process request - // end-of-body and fetchParams’s task destination. - if (request.body == null && fetchParams.processRequestEndOfBody) { - queueMicrotask(() => fetchParams.processRequestEndOfBody()) - } else if (request.body != null) { - // 2. Otherwise, if body is non-null: + // 5. If algorithm is a hash function recognized by the user + // agent, add the parsed token to result. + if (supportedHashes.includes(algorithm)) { + result.push(parsedToken.groups) + } + } - // 1. Let processBodyChunk given bytes be these steps: - const processBodyChunk = async function * (bytes) { - // 1. If the ongoing fetch is terminated, then abort these steps. - if (isCancelled(fetchParams)) { - return - } + // 4. Return no metadata if empty is true, otherwise return result. + if (empty === true) { + return 'no metadata' + } - // 2. Run this step in parallel: transmit bytes. - yield bytes + return result +} - // 3. If fetchParams’s process request body is non-null, then run - // fetchParams’s process request body given bytes’s length. - fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. 
+ // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' } + } + return algorithm +} - // 2. Let processEndOfBody be these steps: - const processEndOfBody = () => { - // 1. If fetchParams is canceled, then abort these steps. - if (isCancelled(fetchParams)) { - return - } +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } - // 2. If fetchParams’s process request end-of-body is non-null, - // then run fetchParams’s process request end-of-body. - if (fetchParams.processRequestEndOfBody) { - fetchParams.processRequestEndOfBody() + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. + * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue } + return false } + } + + return true +} + +// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request +function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { + // TODO +} + +/** + * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} + * @param {URL} A + * @param {URL} B + */ +function sameOrigin (A, B) { + // 1. If A and B are the same opaque origin, then return true. + if (A.origin === B.origin && A.origin === 'null') { + return true + } + + // 2. If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true + } + + // 3. Return false. + return false +} - // 3. Let processBodyError given e be these steps: - const processBodyError = (e) => { - // 1. If fetchParams is canceled, then abort these steps. - if (isCancelled(fetchParams)) { - return - } +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) - // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. - if (e.name === 'AbortError') { - fetchParams.controller.abort() - } else { - fetchParams.controller.terminate(e) - } - } + return { promise, resolve: res, reject: rej } +} - // 4. 
Incrementally read request’s body given processBodyChunk, processEndOfBody, - // processBodyError, and fetchParams’s task destination. - requestBody = (async function * () { - try { - for await (const bytes of request.body.stream) { - yield * processBodyChunk(bytes) - } - processEndOfBody() - } catch (err) { - processBodyError(err) - } - })() - } +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' +} - try { - // socket is only provided for websockets - const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} - if (socket) { - response = makeResponse({ status, statusText, headersList, socket }) - } else { - const iterator = body[Symbol.asyncIterator]() - fetchParams.controller.next = () => iterator.next() +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} - response = makeResponse({ status, statusText, headersList }) - } - } catch (err) { - // 10. If aborted, then: - if (err.name === 'AbortError') { - // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. - fetchParams.controller.connection.destroy() +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) - // 2. Return the appropriate network error for fetchParams. - return makeAppropriateNetworkError(fetchParams, err) - } +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} - return makeNetworkError(err) - } +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) - // 11. Let pullAlgorithm be an action that resumes the ongoing fetch - // if it is suspended. - const pullAlgorithm = () => { - fetchParams.controller.resume() + // 2. If result is undefined, then throw a TypeError. + if (result === undefined) { + throw new TypeError('Value is not JSON serializable') } - // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s - // controller with reason, given reason. - const cancelAlgorithm = (reason) => { - fetchParams.controller.abort(reason) - } + // 3. Assert: result is a string. + assert(typeof result === 'string') - // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by - // the user agent. - // TODO + // 4. Return result. + return result +} - // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object - // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. - // TODO +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) - // 15. Let stream be a new ReadableStream. - // 16. Set up stream with pullAlgorithm set to pullAlgorithm, - // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to - // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
- if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator } - const stream = new ReadableStream( - { - async start (controller) { - fetchParams.controller.controller = controller - }, - async pull (controller) { - await pullAlgorithm(controller) - }, - async cancel (reason) { - await cancelAlgorithm(reason) - } - }, - { - highWaterMark: 0, - size () { - return 1 - } - } - ) + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. - // 17. Run these steps, but abort when the ongoing fetch is terminated: + // 2. Let thisValue be the this value. - // 1. Set response’s body to a new body whose stream is stream. - response.body = { stream } + // 3. Let object be ? ToObject(thisValue). - // 2. If response is not a network error and request’s cache mode is - // not "no-store", then update response in httpCache for request. - // TODO + // 4. If object is a platform object, then perform a security + // check, passing: - // 3. If includeCredentials is true and the user agent is not configured - // to block cookies for request (see section 7 of [COOKIES]), then run the - // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on - // the value of each header whose name is a byte-case-insensitive match for - // `Set-Cookie` in response’s header list, if any, and request’s current URL. - // TODO + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } - // 18. If aborted, then: - // TODO + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. + const { index, kind, target } = object + const values = target() - // 19. Run these steps in parallel: + // 9. Let len be the length of values. + const len = values.length - // 1. Run these steps, but abort when fetchParams is canceled: - fetchParams.controller.on('terminated', onAborted) - fetchParams.controller.resume = async () => { - // 1. While true - while (true) { - // 1-3. See onData... + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } - // 4. Set bytes to the result of handling content codings given - // codings and bytes. - let bytes - let isFailure - try { - const { done, value } = await fetchParams.controller.next() + // 11. Let pair be the entry in values at index index. + const pair = values[index] - if (isAborted(fetchParams)) { - break - } + // 12. Set object’s index to index + 1. + object.index = index + 1 - bytes = done ? undefined : value - } catch (err) { - if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { - // zlib doesn't like empty streams. - bytes = undefined - } else { - bytes = err + // 13. Return the iterator result for pair and kind. 
+ return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } - // err may be propagated from the result of calling readablestream.cancel, - // which might not be an error. https://github.com/nodejs/undici/issues/2009 - isFailure = true - } - } + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} - if (bytes === undefined) { - // 2. Otherwise, if the bytes transmission for response’s message - // body is done normally and stream is readable, then close - // stream, finalize response for fetchParams and response, and - // abort these in-parallel steps. - readableStreamClose(fetchParams.controller.controller) +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result - finalizeResponse(fetchParams, response) + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break + } + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break + } + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } - return - } + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } +} - // 5. Increase timingInfo’s decoded body size by bytes’s length. - timingInfo.decodedBodySize += bytes?.byteLength ?? 0 +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. - // 6. If bytes is failure, then terminate fetchParams’s controller. - if (isFailure) { - fetchParams.controller.terminate(bytes) - return - } + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody - // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes - // into stream. - fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError - // 8. If stream is errored, then terminate the ongoing fetch. - if (isErrored(stream)) { - fetchParams.controller.terminate() - return - } + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader - // 9. 
If stream doesn’t need more data ask the user agent to suspend - // the ongoing fetch. - if (!fetchParams.controller.controller.desiredSize) { - return - } - } + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return } - // 2. If aborted, then: - function onAborted (reason) { - // 2. If fetchParams is aborted, then: - if (isAborted(fetchParams)) { - // 1. Set response’s aborted flag. - response.aborted = true + // 5. Read all bytes from reader, given successSteps and errorSteps. + try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) + } +} - // 2. If stream is readable, then error stream with the result of - // deserialize a serialized abort reason given fetchParams’s - // controller’s serialized abort reason and an - // implementation-defined realm. - if (isReadable(stream)) { - fetchParams.controller.controller.error( - fetchParams.controller.serializedAbortReason - ) - } - } else { - // 3. Otherwise, if stream is readable, error stream with a TypeError. - if (isReadable(stream)) { - fetchParams.controller.controller.error(new TypeError('terminated', { - cause: isErrorLike(reason) ? reason : undefined - })) - } - } +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream - // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. - // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. - fetchParams.controller.connection.destroy() +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(35356).ReadableStream) } - // 20. Return response. - return response + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} - async function dispatch ({ body }) { - const url = requestCurrentURL(request) - /** @type {import('../..').Agent} */ - const agent = fetchParams.controller.dispatcher +const MAXIMUM_ARGUMENT_LENGTH = 65535 - return new Promise((resolve, reject) => agent.dispatch( - { - path: url.pathname + url.search, - origin: url.origin, - method: request.method, - body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, - headers: request.headersList.entries, - maxRedirections: 0, - upgrade: request.mode === 'websocket' ? 'websocket' : undefined - }, - { - body: null, - abort: null, +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. - onConnect (abort) { - // TODO (fix): Do we need connection here? 
- const { connection } = fetchParams.controller + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) + } - if (connection.destroyed) { - abort(new DOMException('The operation was aborted.', 'AbortError')) - } else { - fetchParams.controller.on('terminated', abort) - this.abort = connection.abort = abort - } - }, + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} - onHeaders (status, headersList, resume, statusText) { - if (status < 200) { - return - } +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } + } +} - let codings = [] - let location = '' +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) + } - const headers = new Headers() + // 2. Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input +} - // For H2, the headers are a plain JS object - // We distinguish between them and iterate accordingly - if (Array.isArray(headersList)) { - for (let n = 0; n < headersList.length; n += 2) { - const key = headersList[n + 0].toString('latin1') - const val = headersList[n + 1].toString('latin1') - if (key.toLowerCase() === 'content-encoding') { - // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 - // "All content-coding values are case-insensitive..." - codings = val.toLowerCase().split(',').map((x) => x.trim()) - } else if (key.toLowerCase() === 'location') { - location = val - } +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 - headers[kHeadersList].append(key, val) - } - } else { - const keys = Object.keys(headersList) - for (const key of keys) { - const val = headersList[key] - if (key.toLowerCase() === 'content-encoding') { - // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 - // "All content-coding values are case-insensitive..." - codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() - } else if (key.toLowerCase() === 'location') { - location = val - } + while (true) { + const { done, value: chunk } = await reader.read() - headers[kHeadersList].append(key, val) - } - } + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } - this.body = new Readable({ read: resume }) + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') + } - const decoders = [] + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length - const willFollow = request.redirect === 'follow' && - location && - redirectStatusSet.has(status) + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. 
+ } +} + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding - if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { - for (const coding of codings) { - // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 - if (coding === 'x-gzip' || coding === 'gzip') { - decoders.push(zlib.createGunzip({ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH - })) - } else if (coding === 'deflate') { - decoders.push(zlib.createInflate()) - } else if (coding === 'br') { - decoders.push(zlib.createBrotliDecompress()) - } else { - decoders.length = 0 - break - } - } - } + const protocol = url.protocol - resolve({ - status, - statusText, - headersList: headers[kHeadersList], - body: decoders.length - ? pipeline(this.body, ...decoders, () => { }) - : this.body.on('error', () => {}) - }) + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' +} - return true - }, +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') + } - onData (chunk) { - if (fetchParams.controller.dump) { - return - } + return url.protocol === 'https:' +} - // 1. If one or more bytes have been transmitted from response’s - // message body, then: +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object - // 1. Let bytes be the transmitted bytes. - const bytes = chunk + const protocol = url.protocol - // 2. Let codings be the result of extracting header list values - // given `Content-Encoding` and response’s header list. - // See pullAlgorithm. + return protocol === 'http:' || protocol === 'https:' +} - // 3. Increase timingInfo’s encoded body size by bytes’s length. - timingInfo.encodedBodySize += bytes.byteLength +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. + */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) - // 4. See pullAlgorithm... 
+module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord, + parseMetadata +} - return this.body.push(bytes) - }, - onComplete () { - if (this.abort) { - fetchParams.controller.off('terminated', this.abort) - } +/***/ }), - fetchParams.controller.ended = true +/***/ 21744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.body.push(null) - }, +"use strict"; - onError (error) { - if (this.abort) { - fetchParams.controller.off('terminated', this.abort) - } - this.body?.destroy(error) +const { types } = __nccwpck_require__(73837) +const { hasOwn, toUSVString } = __nccwpck_require__(52538) - fetchParams.controller.terminate(error) +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} - reject(error) - }, +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) +} - onUpgrade (status, headersList, socket) { - if (status !== 101) { - return - } +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? '' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` - const headers = new Headers() + return webidl.errors.exception({ + header: context.prefix, + message + }) +} - for (let n = 0; n < headersList.length; n += 2) { - const key = headersList[n + 0].toString('latin1') - const val = headersList[n + 1].toString('latin1') +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) +} - headers[kHeadersList].append(key, val) - } +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') + } else { + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] + } +} - resolve({ - status, - statusText: STATUS_CODES[status], - headersList: headers[kHeadersList], - socket - }) +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? 
' only' : ''} ${length} found.`, + ...ctx + }) + } +} - return true - } +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} + +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' } - )) + + return 'Object' + } } } -module.exports = { - fetch, - Fetch, - fetching, - finalizeAndReportTiming -} +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 -/***/ }), + // 2. If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: -/***/ 8359: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 1. Let lowerBound be 0. + lowerBound = 0 -"use strict"; -/* globals AbortController */ + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + // 1. Let lowerBound be -2^bitLength − 1. + lowerBound = Math.pow(-2, bitLength) - 1 + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } -const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(9990) -const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554) -const { FinalizationRegistry } = __nccwpck_require__(6436)() -const util = __nccwpck_require__(3983) -const { - isValidHTTPToken, - sameOrigin, - normalizeMethod, - makePolicyContainer, - normalizeMethodRecord -} = __nccwpck_require__(2538) -const { - forbiddenMethodsSet, - corsSafeListedMethodsSet, - referrerPolicy, - requestRedirect, - requestMode, - requestCredentials, - requestCache, - requestDuplex -} = __nccwpck_require__(1037) -const { kEnumerableProperty } = util -const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) + // 4. Let x be ? ToNumber(V). + let x = Number(V) -let TransformStream = globalThis.TransformStream + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } -const kAbortController = Symbol('abortController') + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. 
+ if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } -const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { - signal.removeEventListener('abort', abort) -}) + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) -// https://fetch.spec.whatwg.org/#request-class -class Request { - // https://fetch.spec.whatwg.org/#dom-request - constructor (input, init = {}) { - if (input === kConstruct) { - return + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) } - webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + // 4. Return x. + return x + } - input = webidl.converters.RequestInfo(input) - init = webidl.converters.RequestInit(init) + // 7. If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) - // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object - this[kRealm] = { - settingsObject: { - baseUrl: getGlobalOrigin(), - get origin () { - return this.baseUrl?.origin - }, - policyContainer: makePolicyContainer() - } + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) } - // 1. Let request be null. - let request = null - - // 2. Let fallbackMode be null. - let fallbackMode = null + // 3. Return x. + return x + } - // 3. Let baseURL be this’s relevant settings object’s API base URL. - const baseUrl = this[kRealm].settingsObject.baseUrl + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } - // 4. Let signal be null. - let signal = null + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) - // 5. If input is a string, then: - if (typeof input === 'string') { - // 1. Let parsedURL be the result of parsing input with baseURL. - // 2. If parsedURL is failure, then throw a TypeError. - let parsedURL - try { - parsedURL = new URL(input, baseUrl) - } catch (err) { - throw new TypeError('Failed to parse URL from ' + input, { cause: err }) - } + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) - // 3. If parsedURL includes credentials, then throw a TypeError. - if (parsedURL.username || parsedURL.password) { - throw new TypeError( - 'Request cannot be constructed from a URL that includes credentials: ' + - input - ) - } + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } - // 4. Set request to a new request whose URL is parsedURL. - request = makeRequest({ urlList: [parsedURL] }) + // 12. Otherwise, return x. + return x +} - // 5. Set fallbackMode to "cors". - fallbackMode = 'cors' - } else { - // 6. 
Otherwise: +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) - // 7. Assert: input is a Request object. - assert(input instanceof Request) + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } - // 8. Set request to input’s request. - request = input[kState] + // 3. Otherwise, return r. + return r +} - // 9. Set signal to input’s signal. - signal = input[kSignal] +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) } - // 7. Let origin be this’s relevant settings object’s origin. - const origin = this[kRealm].settingsObject.origin - - // 8. Let window be "client". - let window = 'client' + // 2. Let method be ? GetMethod(V, @@iterator). + /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] - // 9. If request’s window is an environment settings object and its origin - // is same origin with origin, then set window to request’s window. + // 3. If method is undefined, throw a TypeError. if ( - request.window?.constructor?.name === 'EnvironmentSettingsObject' && - sameOrigin(request.window, origin) + method === undefined || + typeof method.next !== 'function' ) { - window = request.window + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) } - // 10. If init["window"] exists and is non-null, then throw a TypeError. - if (init.window != null) { - throw new TypeError(`'window' option '${window}' must be null`) - } + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() - // 11. If init["window"] exists, then set window to "no-window". - if ('window' in init) { - window = 'no-window' + if (done) { + break + } + + seq.push(converter(value)) } - // 12. Set request to a new request with the following properties: - request = makeRequest({ - // URL request’s URL. - // undici implementation note: this is set as the first item in request's urlList in makeRequest - // method request’s method. - method: request.method, - // header list A copy of request’s header list. - // undici implementation note: headersList is cloned in makeRequest - headersList: request.headersList, - // unsafe-request flag Set. - unsafeRequest: request.unsafeRequest, - // client This’s relevant settings object. - client: this[kRealm].settingsObject, - // window window. - window, - // priority request’s priority. - priority: request.priority, - // origin request’s origin. The propagation of the origin is only significant for navigation requests - // being handled by a service worker. In this scenario a request can have an origin that is different - // from the current client. - origin: request.origin, - // referrer request’s referrer. - referrer: request.referrer, - // referrer policy request’s referrer policy. - referrerPolicy: request.referrerPolicy, - // mode request’s mode. - mode: request.mode, - // credentials mode request’s credentials mode. - credentials: request.credentials, - // cache mode request’s cache mode. - cache: request.cache, - // redirect mode request’s redirect mode. 
- redirect: request.redirect, - // integrity metadata request’s integrity metadata. - integrity: request.integrity, - // keepalive request’s keepalive. - keepalive: request.keepalive, - // reload-navigation flag request’s reload-navigation flag. - reloadNavigation: request.reloadNavigation, - // history-navigation flag request’s history-navigation flag. - historyNavigation: request.historyNavigation, - // URL list A clone of request’s URL list. - urlList: [...request.urlList] - }) + return seq + } +} + +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } - const initHasKey = Object.keys(init).length !== 0 + // 2. Let result be a new empty instance of record. + const result = {} - // 13. If init is not empty, then: - if (initHasKey) { - // 1. If request’s mode is "navigate", then set it to "same-origin". - if (request.mode === 'navigate') { - request.mode = 'same-origin' + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) + + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue } - // 2. Unset request’s reload-navigation flag. - request.reloadNavigation = false + // 5. Return result. + return result + } - // 3. Unset request’s history-navigation flag. - request.historyNavigation = false + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) - // 4. Set request’s origin to "client". - request.origin = 'client' + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). + const desc = Reflect.getOwnPropertyDescriptor(O, key) - // 5. Set request’s referrer to "client" - request.referrer = 'client' + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) - // 6. Set request’s referrer policy to the empty string. - request.referrerPolicy = '' + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) - // 7. Set request’s URL to request’s current URL. - request.url = request.urlList[request.urlList.length - 1] + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } - // 8. Set request’s URL list to « request’s URL ». - request.urlList = [request.url] + // 5. Return result. + return result + } +} + +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) } - // 14. If init["referrer"] exists, then: - if (init.referrer !== undefined) { - // 1. Let referrer be init["referrer"]. - const referrer = init.referrer + return V + } +} - // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". 
- if (referrer === '') { - request.referrer = 'no-referrer' - } else { - // 1. Let parsedReferrer be the result of parsing referrer with - // baseURL. - // 2. If parsedReferrer is failure, then throw a TypeError. - let parsedReferrer - try { - parsedReferrer = new URL(referrer, baseUrl) - } catch (err) { - throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} + + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } + + for (const options of converters) { + const { key, defaultValue, required, converter } = options + + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. + // When this happens, do not perform a conversion, + // and do not assign the key a value. + if (required || hasDefault || value !== undefined) { + value = converter(value) - // 3. If one of the following is true - // - parsedReferrer’s scheme is "about" and path is the string "client" - // - parsedReferrer’s origin is not same origin with origin - // then set request’s referrer to "client". if ( - (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || - (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + options.allowedValues && + !options.allowedValues.includes(value) ) { - request.referrer = 'client' - } else { - // 4. Otherwise, set request’s referrer to parsedReferrer. - request.referrer = parsedReferrer + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) } + + dict[key] = value } } - // 15. If init["referrerPolicy"] exists, then set request’s referrer policy - // to it. - if (init.referrerPolicy !== undefined) { - request.referrerPolicy = init.referrerPolicy - } + return dict + } +} - // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. - let mode - if (init.mode !== undefined) { - mode = init.mode - } else { - mode = fallbackMode +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V } - // 17. If mode is "navigate", then throw a TypeError. - if (mode === 'navigate') { - throw webidl.errors.exception({ - header: 'Request constructor', - message: 'invalid request mode navigate.' - }) - } + return converter(V) + } +} - // 18. If mode is non-null, set request’s mode to mode. - if (mode != null) { - request.mode = mode - } +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' + } - // 19. 
If init["credentials"] exists, then set request’s credentials mode - // to it. - if (init.credentials !== undefined) { - request.credentials = init.credentials - } + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') + } - // 18. If init["cache"] exists, then set request’s cache mode to it. - if (init.cache !== undefined) { - request.cache = init.cache - } + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) +} - // 21. If request’s cache mode is "only-if-cached" and request’s mode is - // not "same-origin", then throw a TypeError. - if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. + for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { throw new TypeError( - "'only-if-cached' can be set only with 'same-origin' mode" + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` ) } + } - // 22. If init["redirect"] exists, then set request’s redirect mode to it. - if (init.redirect !== undefined) { - request.redirect = init.redirect - } + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} - // 23. If init["integrity"] exists, then set request’s integrity metadata to it. - if (init.integrity != null) { - request.integrity = String(init.integrity) - } +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString - // 24. If init["keepalive"] exists, then set request’s keepalive to it. - if (init.keepalive !== undefined) { - request.keepalive = Boolean(init.keepalive) - } +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) - // 25. If init["method"] exists, then: - if (init.method !== undefined) { - // 1. Let method be init["method"]. - let method = init.method + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} - // 2. If method is not a method or method is a forbidden method, then - // throw a TypeError. - if (!isValidHTTPToken(method)) { - throw new TypeError(`'${method}' is not a valid HTTP method.`) - } +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} - if (forbiddenMethodsSet.has(method.toUpperCase())) { - throw new TypeError(`'${method}' HTTP method is unsupported.`) - } +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') - // 3. Normalize method. - method = normalizeMethodRecord[method] ?? normalizeMethod(method) + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} - // 4. Set request’s method to method. 
- request.method = method - } +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') - // 26. If init["signal"] exists, then set signal to it. - if (init.signal !== undefined) { - signal = init.signal - } + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} - // 27. Set this’s request to request. - this[kState] = request +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') - // 28. Set this’s signal to a new AbortSignal object with this’s relevant - // Realm. - // TODO: could this be simplified with AbortSignal.any - // (https://dom.spec.whatwg.org/#dom-abortsignal-any) - const ac = new AbortController() - this[kSignal] = ac.signal - this[kSignal][kRealm] = this[kRealm] + // 2. Return the IDL unsigned long value that + // represents the same numeric value as x. + return x +} - // 29. If signal is not null, then make this’s signal follow signal. - if (signal != null) { - if ( - !signal || - typeof signal.aborted !== 'boolean' || - typeof signal.addEventListener !== 'function' - ) { - throw new TypeError( - "Failed to construct 'Request': member signal is not of type AbortSignal." - ) - } +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) - if (signal.aborted) { - ac.abort(signal.reason) - } else { - // Keep a strong ref to ac while request object - // is alive. This is needed to prevent AbortController - // from being prematurely garbage collected. - // See, https://github.com/nodejs/undici/issues/1926. - this[kAbortController] = ac + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} - const acRef = new WeakRef(ac) - const abort = function () { - const ac = acRef.deref() - if (ac !== undefined) { - ac.abort(this.reason) - } - } +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) + } - // Third-party AbortControllers may not work with these. - // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. - try { - // If the max amount of listeners is equal to the default, increase it - // This is only available in node >= v19.9.0 - if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { - setMaxListeners(100, signal) - } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { - setMaxListeners(100, signal) - } - } catch {} + // 2. 
If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } - util.addAbortListener(signal, abort) - requestFinalizer.register(ac, { signal, abort }) - } - } + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. - // 30. Set this’s headers to a new Headers object with this’s relevant - // Realm, whose header list is request’s header list and guard is - // "request". - this[kHeaders] = new Headers(kConstruct) - this[kHeaders][kHeadersList] = request.headersList - this[kHeaders][kGuard] = 'request' - this[kHeaders][kRealm] = this[kRealm] + // 4. Return the IDL ArrayBuffer value that is a + // reference to the same object as V. + return V +} - // 31. If this’s request’s mode is "no-cors", then: - if (mode === 'no-cors') { - // 1. If this’s request’s method is not a CORS-safelisted method, - // then throw a TypeError. - if (!corsSafeListedMethodsSet.has(request.method)) { - throw new TypeError( - `'${request.method} is unsupported in no-cors mode.` - ) - } +webidl.converters.TypedArray = function (V, T, opts = {}) { + // 1. Let T be the IDL type V is being converted to. - // 2. Set this’s headers’s guard to "request-no-cors". - this[kHeaders][kGuard] = 'request-no-cors' - } + // 2. If Type(V) is not Object, or V does not have a + // [[TypedArrayName]] internal slot with a value + // equal to T’s name, then throw a TypeError. + if ( + webidl.util.Type(V) !== 'Object' || + !types.isTypedArray(V) || + V.constructor.name !== T.name + ) { + throw webidl.errors.conversionFailed({ + prefix: `${T.name}`, + argument: `${V}`, + types: [T.name] + }) + } - // 32. If init is not empty, then: - if (initHasKey) { - /** @type {HeadersList} */ - const headersList = this[kHeaders][kHeadersList] - // 1. Let headers be a copy of this’s headers and its associated header - // list. - // 2. If init["headers"] exists, then set headers to init["headers"]. - const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) + // 3. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } - // 3. Empty this’s headers’s header list. - headersList.clear() + // 4. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable array buffers are currently a proposal - // 4. If headers is a Headers object, then for each header in its header - // list, append header’s name/header’s value to this’s headers. - if (headers instanceof HeadersList) { - for (const [key, val] of headers) { - headersList.append(key, val) - } - // Note: Copy the `set-cookie` meta-data. - headersList.cookies = headers.cookies - } else { - // 5. Otherwise, fill this’s headers with headers. 
- fillHeaders(this[kHeaders], headers) - } - } + // 5. Return the IDL value of type T that is a reference + // to the same object as V. + return V +} + +webidl.converters.DataView = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have a + // [[DataView]] internal slot, then throw a TypeError. + if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { + throw webidl.errors.exception({ + header: 'DataView', + message: 'Object is not a DataView.' + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, + // then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable ArrayBuffers are currently a proposal + + // 4. Return the IDL DataView value that is a reference + // to the same object as V. + return V +} + +// https://webidl.spec.whatwg.org/#BufferSource +webidl.converters.BufferSource = function (V, opts = {}) { + if (types.isAnyArrayBuffer(V)) { + return webidl.converters.ArrayBuffer(V, opts) + } - // 33. Let inputBody be input’s request’s body if input is a Request - // object; otherwise null. - const inputBody = input instanceof Request ? input[kState].body : null + if (types.isTypedArray(V)) { + return webidl.converters.TypedArray(V, V.constructor) + } - // 34. If either init["body"] exists and is non-null or inputBody is - // non-null, and request’s method is `GET` or `HEAD`, then throw a - // TypeError. - if ( - (init.body != null || inputBody != null) && - (request.method === 'GET' || request.method === 'HEAD') - ) { - throw new TypeError('Request with GET/HEAD method cannot have body.') - } + if (types.isDataView(V)) { + return webidl.converters.DataView(V, opts) + } - // 35. Let initBody be null. - let initBody = null + throw new TypeError(`Could not convert ${V} to a BufferSource.`) +} - // 36. If init["body"] exists and is non-null, then: - if (init.body != null) { - // 1. Let Content-Type be null. - // 2. Set initBody and Content-Type to the result of extracting - // init["body"], with keepalive set to request’s keepalive. - const [extractedBody, contentType] = extractBody( - init.body, - request.keepalive - ) - initBody = extractedBody +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.ByteString +) - // 3, If Content-Type is non-null and this’s headers’s header list does - // not contain `Content-Type`, then append `Content-Type`/Content-Type to - // this’s headers. - if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { - this[kHeaders].append('content-type', contentType) - } - } +webidl.converters['sequence>'] = webidl.sequenceConverter( + webidl.converters['sequence'] +) - // 37. Let inputOrInitBody be initBody if it is non-null; otherwise - // inputBody. - const inputOrInitBody = initBody ?? inputBody +webidl.converters['record'] = webidl.recordConverter( + webidl.converters.ByteString, + webidl.converters.ByteString +) - // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is - // null, then: - if (inputOrInitBody != null && inputOrInitBody.source == null) { - // 1. 
If initBody is non-null and init["duplex"] does not exist, - // then throw a TypeError. - if (initBody != null && init.duplex == null) { - throw new TypeError('RequestInit: duplex option is required when sending a body.') - } +module.exports = { + webidl +} - // 2. If this’s request’s mode is neither "same-origin" nor "cors", - // then throw a TypeError. - if (request.mode !== 'same-origin' && request.mode !== 'cors') { - throw new TypeError( - 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' - ) - } - // 3. Set this’s request’s use-CORS-preflight flag. - request.useCORSPreflightFlag = true - } +/***/ }), - // 39. Let finalBody be inputOrInitBody. - let finalBody = inputOrInitBody +/***/ 84854: +/***/ ((module) => { - // 40. If initBody is null and inputBody is non-null, then: - if (initBody == null && inputBody != null) { - // 1. If input is unusable, then throw a TypeError. - if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { - throw new TypeError( - 'Cannot construct a Request with a Request object that has already been used.' - ) - } +"use strict"; - // 2. Set finalBody to the result of creating a proxy for inputBody. - if (!TransformStream) { - TransformStream = (__nccwpck_require__(5356).TransformStream) - } - // https://streams.spec.whatwg.org/#readablestream-create-a-proxy - const identityTransform = new TransformStream() - inputBody.stream.pipeThrough(identityTransform) - finalBody = { - source: inputBody.source, - length: inputBody.length, - stream: identityTransform.readable - } - } +/** + * @see https://encoding.spec.whatwg.org/#concept-encoding-get + * @param {string|undefined} label + */ +function getEncoding (label) { + if (!label) { + return 'failure' + } - // 41. Set this’s request’s body to finalBody. - this[kState].body = finalBody + // 1. Remove any leading and trailing ASCII whitespace from label. + // 2. If label is an ASCII case-insensitive match for any of the + // labels listed in the table below, then return the + // corresponding encoding; otherwise return failure. 
+ switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': + case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + 
case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' } +} - // Returns request’s HTTP method, which is "GET" by default. - get method () { - webidl.brandCheck(this, Request) +module.exports = { + getEncoding +} - // The method getter steps are to return this’s request’s method. - return this[kState].method - } - // Returns the URL of request as a string. - get url () { - webidl.brandCheck(this, Request) +/***/ }), - // The url getter steps are to return this’s request’s URL, serialized. - return URLSerializer(this[kState].url) - } +/***/ 1446: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Returns a Headers object consisting of the headers associated with request. - // Note that headers added in the network layer by the user agent will not - // be accounted for in this object, e.g., the "Host" header. - get headers () { - webidl.brandCheck(this, Request) +"use strict"; - // The headers getter steps are to return this’s headers. - return this[kHeaders] - } - // Returns the kind of resource requested by request, e.g., "document" - // or "script". - get destination () { - webidl.brandCheck(this, Request) +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = __nccwpck_require__(87530) +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = __nccwpck_require__(29054) +const { webidl } = __nccwpck_require__(21744) +const { kEnumerableProperty } = __nccwpck_require__(83983) - // The destination getter are to return this’s request’s destination. 
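The getEncoding helper added above normalizes an encoding label by trimming ASCII whitespace, lower-casing it, and switching over the WHATWG label table, with the string 'failure' as its miss sentinel. An abbreviated standalone sketch of that behaviour; only a handful of the labels are reproduced here, so this is not a drop-in replacement for the full table.

// Abbreviated sketch of the label-normalization pattern above.
function getEncodingSketch (label) {
  if (!label) return 'failure'
  switch (String(label).trim().toLowerCase()) {
    case 'unicode-1-1-utf-8':
    case 'utf-8':
    case 'utf8':
      return 'UTF-8'
    case 'ascii':
    case 'us-ascii':
    case 'latin1':
    case 'iso-8859-1':
    case 'windows-1252':
      return 'windows-1252'
    case 'utf-16':
    case 'utf-16le':
      return 'UTF-16LE'
    default:
      return 'failure'
  }
}

console.log(getEncodingSketch('  UTF8  '))  // 'UTF-8'
console.log(getEncodingSketch('ascii'))     // 'windows-1252' (legacy labels fold into it)
console.log(getEncodingSketch('klingon'))   // 'failure'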
- return this[kState].destination +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null + } } - // Returns the referrer of request. Its value can be a same-origin URL if - // explicitly set in init, the empty string to indicate no referrer, and - // "about:client" when defaulting to the global’s default. This is used - // during fetching to determine the value of the `Referer` header of the - // request being made. - get referrer () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) - // 1. If this’s request’s referrer is "no-referrer", then return the - // empty string. - if (this[kState].referrer === 'no-referrer') { - return '' - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) - // 2. If this’s request’s referrer is "client", then return - // "about:client". - if (this[kState].referrer === 'client') { - return 'about:client' - } + blob = webidl.converters.Blob(blob, { strict: false }) - // Return this’s request’s referrer, serialized. - return this[kState].referrer.toString() + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. + readOperation(this, blob, 'ArrayBuffer') } - // Returns the referrer policy associated with request. - // This is used during fetching to compute the value of the request’s - // referrer. - get referrerPolicy () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) - // The referrerPolicy getter steps are to return this’s request’s referrer policy. - return this[kState].referrerPolicy - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) - // Returns the mode associated with request, which is a string indicating - // whether the request will use CORS, or will be restricted to same-origin - // URLs. - get mode () { - webidl.brandCheck(this, Request) + blob = webidl.converters.Blob(blob, { strict: false }) - // The mode getter steps are to return this’s request’s mode. - return this[kState].mode + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. + readOperation(this, blob, 'BinaryString') } - // Returns the credentials mode associated with request, - // which is a string indicating whether credentials will be sent with the - // request always, never, or only when sent to a same-origin URL. - get credentials () { - // The credentials getter steps are to return this’s request’s credentials mode. 
- return this[kState].credentials + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) } - // Returns the cache mode associated with request, - // which is a string indicating how the request will - // interact with the browser’s cache when fetching. - get cache () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) - // The cache getter steps are to return this’s request’s cache mode. - return this[kState].cache - } + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) - // Returns the redirect mode associated with request, - // which is a string indicating how redirects for the - // request will be handled during fetching. A request - // will follow redirects by default. - get redirect () { - webidl.brandCheck(this, Request) + blob = webidl.converters.Blob(blob, { strict: false }) - // The redirect getter steps are to return this’s request’s redirect mode. - return this[kState].redirect + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') } - // Returns request’s subresource integrity metadata, which is a - // cryptographic hash of the resource being fetched. Its value - // consists of multiple hashes separated by whitespace. [SRI] - get integrity () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return + } - // The integrity getter steps are to return this’s request’s integrity - // metadata. - return this[kState].integrity - } + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } - // Returns a boolean indicating whether or not request can outlive the - // global in which it was created. - get keepalive () { - webidl.brandCheck(this, Request) + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true - // The keepalive getter steps are to return this’s request’s keepalive. - return this[kState].keepalive - } + // 4. Terminate the algorithm for the read method being processed. + // TODO - // Returns a boolean indicating whether or not request is for a reload - // navigation. - get isReloadNavigation () { - webidl.brandCheck(this, Request) + // 5. Fire a progress event called abort at this. 
+ fireAProgressEvent('abort', this) - // The isReloadNavigation getter steps are to return true if this’s - // request’s reload-navigation flag is set; otherwise false. - return this[kState].reloadNavigation + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) + } } - // Returns a boolean indicating whether or not request is for a history - // navigation (a.k.a. back-foward navigation). - get isHistoryNavigation () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) - // The isHistoryNavigation getter steps are to return true if this’s request’s - // history-navigation flag is set; otherwise false. - return this[kState].historyNavigation + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE + } } - // Returns the signal associated with request, which is an AbortSignal - // object indicating whether or not request has been aborted, and its - // abort event handler. - get signal () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) - // The signal getter steps are to return this’s signal. - return this[kSignal] + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] } - get body () { - webidl.brandCheck(this, Request) + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) - return this[kState].body ? this[kState].body.stream : null + // The error attribute’s getter, when invoked, must return + // this's error. + return this[kError] } - get bodyUsed () { - webidl.brandCheck(this, Request) + get onloadend () { + webidl.brandCheck(this, FileReader) - return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + return this[kEvents].loadend } - get duplex () { - webidl.brandCheck(this, Request) + set onloadend (fn) { + webidl.brandCheck(this, FileReader) - return 'half' + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) + } + + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } } - // Returns a clone of request. - clone () { - webidl.brandCheck(this, Request) + get onerror () { + webidl.brandCheck(this, FileReader) - // 1. If this is unusable, then throw a TypeError. - if (this.bodyUsed || this.body?.locked) { - throw new TypeError('unusable') - } + return this[kEvents].error + } - // 2. Let clonedRequest be the result of cloning this’s request. - const clonedRequest = cloneRequest(this[kState]) + set onerror (fn) { + webidl.brandCheck(this, FileReader) - // 3. Let clonedRequestObject be the result of creating a Request object, - // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. 
- const clonedRequestObject = new Request(kConstruct) - clonedRequestObject[kState] = clonedRequest - clonedRequestObject[kRealm] = this[kRealm] - clonedRequestObject[kHeaders] = new Headers(kConstruct) - clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList - clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] - clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } - // 4. Make clonedRequestObject’s signal follow this’s signal. - const ac = new AbortController() - if (this.signal.aborted) { - ac.abort(this.signal.reason) + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) } else { - util.addAbortListener( - this.signal, - () => { - ac.abort(this.signal.reason) - } - ) + this[kEvents].error = null } - clonedRequestObject[kSignal] = ac.signal - - // 4. Return clonedRequestObject. - return clonedRequestObject } -} -mixinBody(Request) + get onloadstart () { + webidl.brandCheck(this, FileReader) -function makeRequest (init) { - // https://fetch.spec.whatwg.org/#requests - const request = { - method: 'GET', - localURLsOnly: false, - unsafeRequest: false, - body: null, - client: null, - reservedClient: null, - replacesClientId: '', - window: 'client', - keepalive: false, - serviceWorkers: 'all', - initiator: '', - destination: '', - priority: null, - origin: 'client', - policyContainer: 'client', - referrer: 'client', - referrerPolicy: '', - mode: 'no-cors', - useCORSPreflightFlag: false, - credentials: 'same-origin', - useCredentials: false, - cache: 'default', - redirect: 'follow', - integrity: '', - cryptoGraphicsNonceMetadata: '', - parserMetadata: '', - reloadNavigation: false, - historyNavigation: false, - userActivation: false, - taintedOrigin: false, - redirectCount: 0, - responseTainting: 'basic', - preventNoCacheCacheControlHeaderModification: false, - done: false, - timingAllowFailed: false, - ...init, - headersList: init.headersList - ? new HeadersList(init.headersList) - : new HeadersList() + return this[kEvents].loadstart } - request.url = request.urlList[0] - return request -} -// https://fetch.spec.whatwg.org/#concept-request-clone -function cloneRequest (request) { - // To clone a request request, run these steps: + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) - // 1. Let newRequest be a copy of request, except for its body. - const newRequest = makeRequest({ ...request, body: null }) + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) + } - // 2. If request’s body is non-null, set newRequest’s body to the - // result of cloning request’s body. - if (request.body != null) { - newRequest.body = cloneBody(request.body) + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null + } } - // 3. Return newRequest. 
- return newRequest -} + get onprogress () { + webidl.brandCheck(this, FileReader) -Object.defineProperties(Request.prototype, { - method: kEnumerableProperty, - url: kEnumerableProperty, - headers: kEnumerableProperty, - redirect: kEnumerableProperty, - clone: kEnumerableProperty, - signal: kEnumerableProperty, - duplex: kEnumerableProperty, - destination: kEnumerableProperty, - body: kEnumerableProperty, - bodyUsed: kEnumerableProperty, - isHistoryNavigation: kEnumerableProperty, - isReloadNavigation: kEnumerableProperty, - keepalive: kEnumerableProperty, - integrity: kEnumerableProperty, - cache: kEnumerableProperty, - credentials: kEnumerableProperty, - attribute: kEnumerableProperty, - referrerPolicy: kEnumerableProperty, - referrer: kEnumerableProperty, - mode: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'Request', - configurable: true + return this[kEvents].progress } -}) -webidl.converters.Request = webidl.interfaceConverter( - Request -) + set onprogress (fn) { + webidl.brandCheck(this, FileReader) -// https://fetch.spec.whatwg.org/#requestinfo -webidl.converters.RequestInfo = function (V) { - if (typeof V === 'string') { - return webidl.converters.USVString(V) - } + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) + } - if (V instanceof Request) { - return webidl.converters.Request(V) + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) + } else { + this[kEvents].progress = null + } } - return webidl.converters.USVString(V) -} - -webidl.converters.AbortSignal = webidl.interfaceConverter( - AbortSignal -) + get onload () { + webidl.brandCheck(this, FileReader) -// https://fetch.spec.whatwg.org/#requestinit -webidl.converters.RequestInit = webidl.dictionaryConverter([ - { - key: 'method', - converter: webidl.converters.ByteString - }, - { - key: 'headers', - converter: webidl.converters.HeadersInit - }, - { - key: 'body', - converter: webidl.nullableConverter( - webidl.converters.BodyInit - ) - }, - { - key: 'referrer', - converter: webidl.converters.USVString - }, - { - key: 'referrerPolicy', - converter: webidl.converters.DOMString, - // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy - allowedValues: referrerPolicy - }, - { - key: 'mode', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#concept-request-mode - allowedValues: requestMode - }, - { - key: 'credentials', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestcredentials - allowedValues: requestCredentials - }, - { - key: 'cache', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestcache - allowedValues: requestCache - }, - { - key: 'redirect', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestredirect - allowedValues: requestRedirect - }, - { - key: 'integrity', - converter: webidl.converters.DOMString - }, - { - key: 'keepalive', - converter: webidl.converters.boolean - }, - { - key: 'signal', - converter: webidl.nullableConverter( - (signal) => webidl.converters.AbortSignal( - signal, - { strict: false } - ) - ) - }, - { - key: 'window', - converter: webidl.converters.any - }, - { - key: 'duplex', - converter: webidl.converters.DOMString, - allowedValues: requestDuplex + return this[kEvents].load } -]) -module.exports = { Request, makeRequest } + set onload (fn) { + webidl.brandCheck(this, FileReader) + if (this[kEvents].load) { + this.removeEventListener('load', 
this[kEvents].load) + } -/***/ }), + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null + } + } -/***/ 7823: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + get onabort () { + webidl.brandCheck(this, FileReader) -"use strict"; + return this[kEvents].abort + } + set onabort (fn) { + webidl.brandCheck(this, FileReader) -const { Headers, HeadersList, fill } = __nccwpck_require__(554) -const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(9990) -const util = __nccwpck_require__(3983) -const { kEnumerableProperty } = util -const { - isValidReasonPhrase, - isCancelled, - isAborted, - isBlobLike, - serializeJavascriptValueToJSONString, - isErrorLike, - isomorphicEncode -} = __nccwpck_require__(2538) -const { - redirectStatusSet, - nullBodyStatus, - DOMException -} = __nccwpck_require__(1037) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { FormData } = __nccwpck_require__(2015) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { types } = __nccwpck_require__(3837) + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) + } -const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream) -const textEncoder = new TextEncoder('utf-8') + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} -// https://fetch.spec.whatwg.org/#response-class -class Response { - // Creates network error Response. - static error () { - // TODO - const relevantRealm = { settingsObject: {} } +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 - // The static error() method steps are to return the result of creating a - // Response object, given a new network error, "immutable", and this’s - // relevant Realm. 
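The FileReader class added above drives the usual loadstart → progress → load/error → loadend event flow over a Blob's stream, with readyState moving from EMPTY through LOADING to DONE. A usage sketch of that flow, assuming the published undici package (which exposes a FileReader with this interface) rather than this bundled copy; the require path is an assumption for illustration.

const { Blob } = require('buffer')
const { FileReader } = require('undici') // assumption: using the published package, not dist/

const blob = new Blob(['hello world'], { type: 'text/plain' })
const reader = new FileReader()

reader.onloadstart = () => console.log('state:', reader.readyState) // 1 (LOADING)
reader.onload = () => console.log('result:', reader.result)         // 'hello world'
reader.onloadend = () => console.log('state:', reader.readyState)   // 2 (DONE)
reader.onerror = () => console.error(reader.error)

reader.readAsText(blob, 'utf-8')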
- const responseObject = new Response() - responseObject[kState] = makeNetworkError() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm - return responseObject +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true } +}) - // https://fetch.spec.whatwg.org/#dom-response-json - static json (data, init = {}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) - if (init !== null) { - init = webidl.converters.ResponseInit(init) - } +module.exports = { + FileReader +} - // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. - const bytes = textEncoder.encode( - serializeJavascriptValueToJSONString(data) - ) - // 2. Let body be the result of extracting bytes. - const body = extractBody(bytes) +/***/ }), - // 3. Let responseObject be the result of creating a Response object, given a new response, - // "response", and this’s relevant Realm. - const relevantRealm = { settingsObject: {} } - const responseObject = new Response() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kGuard] = 'response' - responseObject[kHeaders][kRealm] = relevantRealm +/***/ 55504: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). - initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) +"use strict"; - // 5. Return responseObject. - return responseObject - } - // Creates a redirect Response that redirects to url with status status. - static redirect (url, status = 302) { - const relevantRealm = { settingsObject: {} } +const { webidl } = __nccwpck_require__(21744) - webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) +const kState = Symbol('ProgressEvent state') - url = webidl.converters.USVString(url) - status = webidl.converters['unsigned short'](status) +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) - // 1. Let parsedURL be the result of parsing url with current settings - // object’s API base URL. - // 2. If parsedURL is failure, then throw a TypeError. - // TODO: base-URL? 
- let parsedURL - try { - parsedURL = new URL(url, getGlobalOrigin()) - } catch (err) { - throw Object.assign(new TypeError('Failed to parse URL from ' + url), { - cause: err - }) - } + super(type, eventInitDict) - // 3. If status is not a redirect status, then throw a RangeError. - if (!redirectStatusSet.has(status)) { - throw new RangeError('Invalid status code ' + status) + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total } + } - // 4. Let responseObject be the result of creating a Response object, - // given a new response, "immutable", and this’s relevant Realm. - const responseObject = new Response() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm - - // 5. Set responseObject’s response’s status to status. - responseObject[kState].status = status + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) - // 6. Let value be parsedURL, serialized and isomorphic encoded. - const value = isomorphicEncode(URLSerializer(parsedURL)) + return this[kState].lengthComputable + } - // 7. Append `Location`/value to responseObject’s response’s header list. - responseObject[kState].headersList.append('location', value) + get loaded () { + webidl.brandCheck(this, ProgressEvent) - // 8. Return responseObject. - return responseObject + return this[kState].loaded } - // https://fetch.spec.whatwg.org/#dom-response - constructor (body = null, init = {}) { - if (body !== null) { - body = webidl.converters.BodyInit(body) - } + get total () { + webidl.brandCheck(this, ProgressEvent) - init = webidl.converters.ResponseInit(init) + return this[kState].total + } +} - // TODO - this[kRealm] = { settingsObject: {} } +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +]) - // 1. Set this’s response to a new response. - this[kState] = makeResponse({}) +module.exports = { + ProgressEvent +} - // 2. Set this’s headers to a new Headers object with this’s relevant - // Realm, whose header list is this’s response’s header list and guard - // is "response". - this[kHeaders] = new Headers(kConstruct) - this[kHeaders][kGuard] = 'response' - this[kHeaders][kHeadersList] = this[kState].headersList - this[kHeaders][kRealm] = this[kRealm] - // 3. Let bodyWithType be null. - let bodyWithType = null +/***/ }), - // 4. If body is non-null, then set bodyWithType to the result of extracting body. - if (body != null) { - const [extractedBody, type] = extractBody(body) - bodyWithType = { body: extractedBody, type } - } +/***/ 29054: +/***/ ((module) => { - // 5. Perform initialize a response given this, init, and bodyWithType. - initializeResponse(this, init, bodyWithType) - } +"use strict"; - // Returns response’s type, e.g., "cors". - get type () { - webidl.brandCheck(this, Response) - // The type getter steps are to return this’s response’s type. 
- return this[kState].type - } +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} - // Returns response’s URL, if it has one; otherwise the empty string. - get url () { - webidl.brandCheck(this, Response) - const urlList = this[kState].urlList +/***/ }), - // The url getter steps are to return the empty string if this’s - // response’s URL is null; otherwise this’s response’s URL, - // serialized with exclude fragment set to true. - const url = urlList[urlList.length - 1] ?? null +/***/ 87530: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (url === null) { - return '' - } +"use strict"; - return URLSerializer(url, true) - } - // Returns whether response was obtained through a redirect. - get redirected () { - webidl.brandCheck(this, Response) +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = __nccwpck_require__(29054) +const { ProgressEvent } = __nccwpck_require__(55504) +const { getEncoding } = __nccwpck_require__(84854) +const { DOMException } = __nccwpck_require__(41037) +const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(685) +const { types } = __nccwpck_require__(73837) +const { StringDecoder } = __nccwpck_require__(71576) +const { btoa } = __nccwpck_require__(14300) - // The redirected getter steps are to return true if this’s response’s URL - // list has more than one item; otherwise false. - return this[kState].urlList.length > 1 +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. + if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') } - // Returns response’s status. - get status () { - webidl.brandCheck(this, Response) + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' - // The status getter steps are to return this’s response’s status. - return this[kState].status - } + // 3. Set fr’s result to null. + fr[kResult] = null - // Returns whether response’s status is an ok status. - get ok () { - webidl.brandCheck(this, Response) + // 4. Set fr’s error to null. + fr[kError] = null - // The ok getter steps are to return true if this’s response’s status is an - // ok status; otherwise false. - return this[kState].status >= 200 && this[kState].status <= 299 - } + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() - // Returns response’s status message. - get statusText () { - webidl.brandCheck(this, Response) + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() - // The statusText getter steps are to return this’s response’s status - // message. - return this[kState].statusText - } + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] - // Returns response’s headers as Headers. 
- get headers () { - webidl.brandCheck(this, Response) + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. + let chunkPromise = reader.read() - // The headers getter steps are to return this’s headers. - return this[kHeaders] - } + // 9. Let isFirstChunk be true. + let isFirstChunk = true - get body () { - webidl.brandCheck(this, Response) + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise - return this[kState].body ? this[kState].body.stream : null - } + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } - get bodyUsed () { - webidl.brandCheck(this, Response) + // 3. Set isFirstChunk to false. + isFirstChunk = false - return !!this[kState].body && util.isDisturbed(this[kState].body.stream) - } + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. - // Returns a clone of response. - clone () { - webidl.brandCheck(this, Response) + // 2. Append bs to bytes. + bytes.push(value) - // 1. If this is unusable, then throw a TypeError. - if (this.bodyUsed || (this.body && this.body.locked)) { - throw webidl.errors.exception({ - header: 'Response.clone', - message: 'Body has already been consumed.' - }) - } + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } - // 2. Let clonedResponse be the result of cloning this’s response. - const clonedResponse = cloneResponse(this[kState]) + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' - // 3. Return the result of creating a Response object, given - // clonedResponse, this’s headers’s guard, and this’s relevant Realm. - const clonedResponseObject = new Response() - clonedResponseObject[kState] = clonedResponse - clonedResponseObject[kRealm] = this[kRealm] - clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList - clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] - clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) - return clonedResponseObject - } -} + // 4. 
Else: -mixinBody(Response) + if (fr[kAborted]) { + return + } -Object.defineProperties(Response.prototype, { - type: kEnumerableProperty, - url: kEnumerableProperty, - status: kEnumerableProperty, - ok: kEnumerableProperty, - redirected: kEnumerableProperty, - statusText: kEnumerableProperty, - headers: kEnumerableProperty, - clone: kEnumerableProperty, - body: kEnumerableProperty, - bodyUsed: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'Response', - configurable: true - } -}) + // 1. Set fr’s result to result. + fr[kResult] = result -Object.defineProperties(Response, { - json: kEnumerableProperty, - redirect: kEnumerableProperty, - error: kEnumerableProperty -}) + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: -// https://fetch.spec.whatwg.org/#concept-response-clone -function cloneResponse (response) { - // To clone a response response, run these steps: + // 1. Set fr’s error to error. + fr[kError] = error - // 1. If response is a filtered response, then return a new identical - // filtered response whose internal response is a clone of response’s - // internal response. - if (response.internalResponse) { - return filterResponse( - cloneResponse(response.internalResponse), - response.type - ) - } + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } - // 2. Let newResponse be a copy of response, except for its body. - const newResponse = makeResponse({ ...response, body: null }) + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) - // 3. If response’s body is non-null, then set newResponse’s body to the - // result of cloning response’s body. - if (response.body != null) { - newResponse.body = cloneBody(response.body) - } + break + } + } catch (error) { + if (fr[kAborted]) { + return + } - // 4. Return newResponse. - return newResponse -} + // 6. Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' -function makeResponse (init) { - return { - aborted: false, - rangeRequested: false, - timingAllowPassed: false, - requestIncludesCredentials: false, - type: 'default', - status: 200, - timingInfo: null, - cacheState: '', - statusText: '', - ...init, - headersList: init.headersList - ? new HeadersList(init.headersList) - : new HeadersList(), - urlList: init.urlList ? [...init.urlList] : [] - } -} + // 2. Set fr’s error to error. + fr[kError] = error -function makeNetworkError (reason) { - const isError = isErrorLike(reason) - return makeResponse({ - type: 'error', - status: 0, - error: isError - ? reason - : new Error(reason ? String(reason) : reason), - aborted: reason && reason.name === 'AbortError' - }) -} + // 3. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) -function makeFilteredResponse (response, state) { - state = { - internalResponse: response, - ...state - } + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) - return new Proxy(response, { - get (target, p) { - return p in state ? 
state[p] : target[p] - }, - set (target, p, value) { - assert(!(p in state)) - target[p] = value - return true + break + } } + })() +} + +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false }) + + reader.dispatchEvent(event) } -// https://fetch.spec.whatwg.org/#concept-filtered-response -function filterResponse (response, type) { - // Set response to the following filtered response with response as its - // internal response, depending on request’s response tainting: - if (type === 'basic') { - // A basic filtered response is a filtered response whose type is "basic" - // and header list excludes any headers in internal response’s header list - // whose name is a forbidden response-header name. +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: - // Note: undici does not implement forbidden response-header names - return makeFilteredResponse(response, { - type: 'basic', - headersList: response.headersList - }) - } else if (type === 'cors') { - // A CORS filtered response is a filtered response whose type is "cors" - // and header list excludes any headers in internal response’s header - // list whose name is not a CORS-safelisted response-header name, given - // internal response’s CORS-exposed header-name list. + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. - // Note: undici does not implement CORS-safelisted response-header names - return makeFilteredResponse(response, { - type: 'cors', - headersList: response.headersList - }) - } else if (type === 'opaque') { - // An opaque filtered response is a filtered response whose type is - // "opaque", URL list is the empty list, status is 0, status message - // is the empty byte sequence, header list is empty, and body is null. + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' - return makeFilteredResponse(response, { - type: 'opaque', - urlList: Object.freeze([]), - status: 0, - statusText: '', - body: null - }) - } else if (type === 'opaqueredirect') { - // An opaque-redirect filtered response is a filtered response whose type - // is "opaqueredirect", status is 0, status message is the empty byte - // sequence, header list is empty, and body is null. 
+ const parsed = parseMIMEType(mimeType || 'application/octet-stream') - return makeFilteredResponse(response, { - type: 'opaqueredirect', - status: 0, - statusText: '', - headersList: [], - body: null - }) - } else { - assert(false) - } -} + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) + } -// https://fetch.spec.whatwg.org/#appropriate-network-error -function makeAppropriateNetworkError (fetchParams, err = null) { - // 1. Assert: fetchParams is canceled. - assert(isCancelled(fetchParams)) + dataURL += ';base64,' - // 2. Return an aborted network error if fetchParams is aborted; - // otherwise return a network error. - return isAborted(fetchParams) - ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) - : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) -} + const decoder = new StringDecoder('latin1') -// https://whatpr.org/fetch/1392.html#initialize-a-response -function initializeResponse (response, init, body) { - // 1. If init["status"] is not in the range 200 to 599, inclusive, then - // throw a RangeError. - if (init.status !== null && (init.status < 200 || init.status > 599)) { - throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') - } + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } - // 2. If init["statusText"] does not match the reason-phrase token production, - // then throw a TypeError. - if ('statusText' in init && init.statusText != null) { - // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: - // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) - if (!isValidReasonPhrase(String(init.statusText))) { - throw new TypeError('Invalid statusText') + dataURL += btoa(decoder.end()) + + return dataURL } - } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' - // 3. Set response’s response’s status to init["status"]. - if ('status' in init && init.status != null) { - response[kState].status = init.status - } + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. + if (encodingName) { + encoding = getEncoding(encodingName) + } - // 4. Set response’s response’s status message to init["statusText"]. - if ('statusText' in init && init.statusText != null) { - response[kState].statusText = init.statusText - } + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) - // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. - if ('headers' in init && init.headers != null) { - fill(response[kHeaders], init.headers) - } + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } - // 6. If body was given, then: - if (body) { - // 1. If response's status is a null body status, then throw a TypeError. - if (nullBodyStatus.includes(response.status)) { - throw webidl.errors.exception({ - header: 'Response constructor', - message: 'Invalid response status code ' + response.status - }) + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' + } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. 
+ return decode(bytes, encoding) } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. + const sequence = combineByteSequences(bytes) - // 2. Set response's body to body's body. - response[kState].body = body.body + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. + let binaryString = '' - // 3. If body's type is non-null and response's header list does not contain - // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. - if (body.type != null && !response[kState].headersList.contains('Content-Type')) { - response[kState].headersList.append('content-type', body.type) + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + binaryString += decoder.write(chunk) + } + + binaryString += decoder.end() + + return binaryString } } } -webidl.converters.ReadableStream = webidl.interfaceConverter( - ReadableStream -) - -webidl.converters.FormData = webidl.interfaceConverter( - FormData -) +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) -webidl.converters.URLSearchParams = webidl.interfaceConverter( - URLSearchParams -) + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) -// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit -webidl.converters.XMLHttpRequestBodyInit = function (V) { - if (typeof V === 'string') { - return webidl.converters.USVString(V) - } + let slice = 0 - if (isBlobLike(V)) { - return webidl.converters.Blob(V, { strict: false }) - } + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding - if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { - return webidl.converters.BufferSource(V) + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 3 : 2 } - if (util.isFormDataLike(V)) { - return webidl.converters.FormData(V, { strict: false }) - } + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". - if (V instanceof URLSearchParams) { - return webidl.converters.URLSearchParams(V) - } + // 4. Return output. - return webidl.converters.DOMString(V) + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) } -// https://fetch.spec.whatwg.org/#bodyinit -webidl.converters.BodyInit = function (V) { - if (V instanceof ReadableStream) { - return webidl.converters.ReadableStream(V) - } +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue - // Note: the spec doesn't include async iterables, - // this is an undici extension. - if (V?.[Symbol.asyncIterator]) { - return V + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. 
+ if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' } - return webidl.converters.XMLHttpRequestBodyInit(V) + return null } -webidl.converters.ResponseInit = webidl.dictionaryConverter([ - { - key: 'status', - converter: webidl.converters['unsigned short'], - defaultValue: 200 - }, - { - key: 'statusText', - converter: webidl.converters.ByteString, - defaultValue: '' - }, - { - key: 'headers', - converter: webidl.converters.HeadersInit - } -]) +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) +} module.exports = { - makeNetworkError, - makeResponse, - makeAppropriateNetworkError, - filterResponse, - Response, - cloneResponse + staticPropertyDescriptors, + readOperation, + fireAProgressEvent } /***/ }), -/***/ 5861: -/***/ ((module) => { +/***/ 21892: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = { - kUrl: Symbol('url'), - kHeaders: Symbol('headers'), - kSignal: Symbol('signal'), - kState: Symbol('state'), - kGuard: Symbol('guard'), - kRealm: Symbol('realm') +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. +const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = __nccwpck_require__(48045) +const Agent = __nccwpck_require__(7890) + +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) } +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') + } + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) +} -/***/ }), +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} -/***/ 2538: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} -"use strict"; +/***/ }), -const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { performance } = __nccwpck_require__(4074) -const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983) -const assert = __nccwpck_require__(9491) -const { isUint8Array } = __nccwpck_require__(9830) +/***/ 46930: +/***/ ((module) => { -// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable -/** @type {import('crypto')|undefined} */ -let crypto +"use strict"; -try { - crypto = __nccwpck_require__(6113) -} catch { -} +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler + } -function responseURL (response) { - // https://fetch.spec.whatwg.org/#responses - // A response has an associated URL. It is a pointer to the last URL - // in response’s URL list and null if response’s URL list is empty. - const urlList = response.urlList - const length = urlList.length - return length === 0 ? 
null : urlList[length - 1].toString() -} + onConnect (...args) { + return this.handler.onConnect(...args) + } -// https://fetch.spec.whatwg.org/#concept-response-location-url -function responseLocationURL (response, requestFragment) { - // 1. If response’s status is not a redirect status, then return null. - if (!redirectStatusSet.has(response.status)) { - return null + onError (...args) { + return this.handler.onError(...args) } - // 2. Let location be the result of extracting header list values given - // `Location` and response’s header list. - let location = response.headersList.get('location') + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } - // 3. If location is a header value, then set location to the result of - // parsing location with response’s URL. - if (location !== null && isValidHeaderValue(location)) { - location = new URL(location, responseURL(response)) + onHeaders (...args) { + return this.handler.onHeaders(...args) } - // 4. If location is a URL whose fragment is null, then set location’s - // fragment to requestFragment. - if (location && !location.hash) { - location.hash = requestFragment + onData (...args) { + return this.handler.onData(...args) } - // 5. Return location. - return location -} + onComplete (...args) { + return this.handler.onComplete(...args) + } -/** @returns {URL} */ -function requestCurrentURL (request) { - return request.urlList[request.urlList.length - 1] + onBodySent (...args) { + return this.handler.onBodySent(...args) + } } -function requestBadPort (request) { - // 1. Let url be request’s current URL. - const url = requestCurrentURL(request) - // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, - // then return blocked. - if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { - return 'blocked' - } +/***/ }), - // 3. Return allowed. - return 'allowed' -} +/***/ 72860: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function isErrorLike (object) { - return object instanceof Error || ( - object?.constructor?.name === 'Error' || - object?.constructor?.name === 'DOMException' - ) -} +"use strict"; -// Check whether |statusText| is a ByteString and -// matches the Reason-Phrase token production. 
-// RFC 2616: https://tools.ietf.org/html/rfc2616 -// RFC 7230: https://tools.ietf.org/html/rfc7230 -// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" -// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 -function isValidReasonPhrase (statusText) { - for (let i = 0; i < statusText.length; ++i) { - const c = statusText.charCodeAt(i) - if ( - !( - ( - c === 0x09 || // HTAB - (c >= 0x20 && c <= 0x7e) || // SP / VCHAR - (c >= 0x80 && c <= 0xff) - ) // obs-text - ) - ) { - return false - } - } - return true -} -/** - * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 - * @param {number} c - */ -function isTokenCharCode (c) { - switch (c) { - case 0x22: - case 0x28: - case 0x29: - case 0x2c: - case 0x2f: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: - case 0x40: - case 0x5b: - case 0x5c: - case 0x5d: - case 0x7b: - case 0x7d: - // DQUOTE and "(),/:;<=>?@[\]{}" - return false - default: - // VCHAR %x21-7E - return c >= 0x21 && c <= 0x7e - } -} +const util = __nccwpck_require__(83983) +const { kBodyUsed } = __nccwpck_require__(72785) +const assert = __nccwpck_require__(39491) +const { InvalidArgumentError } = __nccwpck_require__(48045) +const EE = __nccwpck_require__(82361) -/** - * @param {string} characters - */ -function isValidHTTPToken (characters) { - if (characters.length === 0) { - return false - } - for (let i = 0; i < characters.length; ++i) { - if (!isTokenCharCode(characters.charCodeAt(i))) { - return false - } - } - return true -} +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] -/** - * @see https://fetch.spec.whatwg.org/#header-name - * @param {string} potentialValue - */ -function isValidHeaderName (potentialValue) { - return isValidHTTPToken(potentialValue) -} +const kBody = Symbol('body') -/** - * @see https://fetch.spec.whatwg.org/#header-value - * @param {string} potentialValue - */ -function isValidHeaderValue (potentialValue) { - // - Has no leading or trailing HTTP tab or space bytes. - // - Contains no 0x00 (NUL) or HTTP newline bytes. - if ( - potentialValue.startsWith('\t') || - potentialValue.startsWith(' ') || - potentialValue.endsWith('\t') || - potentialValue.endsWith(' ') - ) { - return false +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false } - if ( - potentialValue.includes('\0') || - potentialValue.includes('\r') || - potentialValue.includes('\n') - ) { - return false + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] } - - return true } -// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect -function setRequestReferrerPolicyOnRedirect (request, actualResponse) { - // Given a request request and a response actualResponse, this algorithm - // updates request’s referrer policy according to the Referrer-Policy - // header (if any) in actualResponse. +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) - // 1. Let policy be the result of executing § 8.1 Parse a referrer policy - // from a Referrer-Policy header on actualResponse. 
+ this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] - // 8.1 Parse a referrer policy from a Referrer-Policy header - // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. - const { headersList } = actualResponse - // 2. Let policy be the empty string. - // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. - // 4. Return policy. - const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? + if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } - // Note: As the referrer-policy can contain multiple policies - // separated by comma, we need to loop through all of them - // and pick the first valid one. - // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy - let policy = '' - if (policyHeader.length > 0) { - // The right-most policy takes precedence. - // The left-most policy is the fallback. - for (let i = policyHeader.length; i !== 0; i--) { - const token = policyHeader[i - 1].trim() - if (referrerPolicyTokens.has(token)) { - policy = token - break + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) } } - // 2. If policy is not the empty string, then set request’s referrer policy to policy. - if (policy !== '') { - request.referrerPolicy = policy + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) } -} -// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check -function crossOriginResourcePolicyCheck () { - // TODO - return 'allowed' -} + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } -// https://fetch.spec.whatwg.org/#concept-cors-check -function corsCheck () { - // TODO - return 'success' -} + onError (error) { + this.handler.onError(error) + } -// https://fetch.spec.whatwg.org/#concept-tao-check -function TAOCheck () { - // TODO - return 'success' -} + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? 
null + : parseLocation(statusCode, headers) -function appendFetchMetadata (httpRequest) { - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header - // TODO + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } - // 1. Assert: r’s url is a potentially trustworthy URL. - // TODO + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname - // 2. Let header be a Structured Header whose value is a token. - let header = null + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null - // 3. Set header’s value to r’s mode. - header = httpRequest.mode + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } - // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. - httpRequest.headersList.set('sec-fetch-mode', header) + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header - // TODO + TLDR: undici always ignores 3xx response bodies. - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header - // TODO -} + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. -// https://fetch.spec.whatwg.org/#append-a-request-origin-header -function appendRequestOriginHeader (request) { - // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. - let serializedOrigin = request.origin + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. - // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. - if (request.responseTainting === 'cors' || request.mode === 'websocket') { - if (serializedOrigin) { - request.headersList.append('origin', serializedOrigin) + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. + */ + } else { + return this.handler.onData(chunk) } + } - // 3. 
Otherwise, if request’s method is neither `GET` nor `HEAD`, then: - } else if (request.method !== 'GET' && request.method !== 'HEAD') { - // 1. Switch on request’s referrer policy: - switch (request.referrerPolicy) { - case 'no-referrer': - // Set serializedOrigin to `null`. - serializedOrigin = null - break - case 'no-referrer-when-downgrade': - case 'strict-origin': - case 'strict-origin-when-cross-origin': - // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. - if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { - serializedOrigin = null - } - break - case 'same-origin': - // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. - if (!sameOrigin(request, requestCurrentURL(request))) { - serializedOrigin = null - } - break - default: - // Do nothing. + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. + */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) } + } - if (serializedOrigin) { - // 2. Append (`Origin`, serializedOrigin) to request’s header list. - request.headersList.append('origin', serializedOrigin) + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) } } } -function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { - // TODO - return performance.now() -} +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } -// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info -function createOpaqueTimingInfo (timingInfo) { - return { - startTime: timingInfo.startTime ?? 0, - redirectStartTime: 0, - redirectEndTime: 0, - postRedirectStartTime: timingInfo.startTime ?? 
0, - finalServiceWorkerStartTime: 0, - finalNetworkResponseStartTime: 0, - finalNetworkRequestStartTime: 0, - endTime: 0, - encodedBodySize: 0, - decodedBodySize: 0, - finalConnectionTimingInfo: null + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } } } -// https://html.spec.whatwg.org/multipage/origin.html#policy-container -function makePolicyContainer () { - // Note: the fetch spec doesn't make use of embedder policy or CSP list - return { - referrerPolicy: 'strict-origin-when-cross-origin' +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + if (header.length === 4) { + return util.headerNameToString(header) === 'host' } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } -// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container -function clonePolicyContainer (policyContainer) { - return { - referrerPolicy: policyContainer.referrerPolicy +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') } + return ret } -// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer -function determineRequestsReferrer (request) { - // 1. Let policy be request's referrer policy. - const policy = request.referrerPolicy +module.exports = RedirectHandler - // Note: policy cannot (shouldn't) be null or an empty string. - assert(policy) - // 2. Let environment be request’s client. +/***/ }), - let referrerSource = null +/***/ 82286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 3. Switch on request’s referrer: - if (request.referrer === 'client') { - // Note: node isn't a browser and doesn't implement document/iframes, - // so we bypass this step and replace it with our own. +const assert = __nccwpck_require__(39491) - const globalOrigin = getGlobalOrigin() +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(72785) +const { RequestRetryError } = __nccwpck_require__(48045) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(83983) - if (!globalOrigin || globalOrigin.origin === 'null') { - return 'no-referrer' +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? 
{} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? [ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] } - // note: we need to clone it as it's mutated - referrerSource = new URL(globalOrigin) - } else if (request.referrer instanceof URL) { - // Let referrerSource be request’s referrer. - referrerSource = request.referrer + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) } - // 4. Let request’s referrerURL be the result of stripping referrerSource for - // use as a referrer. - let referrerURL = stripURLForReferrer(referrerSource) + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } - // 5. Let referrerOrigin be the result of stripping referrerSource for use as - // a referrer, with the origin-only flag set to true. - const referrerOrigin = stripURLForReferrer(referrerSource, true) + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } - // 6. If the result of serializing referrerURL is a string whose length is - // greater than 4096, set referrerURL to referrerOrigin. - if (referrerURL.toString().length > 4096) { - referrerURL = referrerOrigin + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } } - const areSameOrigin = sameOrigin(request, referrerURL) - const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && - !isURLPotentiallyTrustworthy(request.url) + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } - // 8. Execute the switch statements corresponding to the value of policy: - switch (policy) { - case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) - case 'unsafe-url': return referrerURL - case 'same-origin': - return areSameOrigin ? referrerOrigin : 'no-referrer' - case 'origin-when-cross-origin': - return areSameOrigin ? referrerURL : referrerOrigin - case 'strict-origin-when-cross-origin': { - const currentURL = requestCurrentURL(request) + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state - // 1. If the origin of referrerURL and the origin of request’s current - // URL are the same, then return referrerURL. 
- if (sameOrigin(referrerURL, currentURL)) { - return referrerURL - } + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout - // 2. If referrerURL is a potentially trustworthy URL and request’s - // current URL is not a potentially trustworthy URL, then return no - // referrer. - if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { - return 'no-referrer' - } + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } - // 3. Return referrerOrigin. - return referrerOrigin + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return } - case 'strict-origin': // eslint-disable-line - /** - * 1. If referrerURL is a potentially trustworthy URL and - * request’s current URL is not a potentially trustworthy URL, - * then return no referrer. - * 2. Return referrerOrigin - */ - case 'no-referrer-when-downgrade': // eslint-disable-line - /** - * 1. If referrerURL is a potentially trustworthy URL and - * request’s current URL is not a potentially trustworthy URL, - * then return no referrer. - * 2. Return referrerOrigin - */ - default: // eslint-disable-line - return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) } -} -/** - * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url - * @param {URL} url - * @param {boolean|undefined} originOnly - */ -function stripURLForReferrer (url, originOnly) { - // 1. Assert: url is a URL. - assert(url instanceof URL) + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } - // 2. If url’s scheme is a local scheme, then return no referrer. - if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { - return 'no-referrer' - } + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null - // 3. Set url’s username to the empty string. - url.username = '' + if (statusCode !== 206) { + return true + } - // 4. Set url’s password to the empty string. 
- url.password = '' + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } - // 5. Set url’s fragment to null. - url.hash = '' + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } - // 6. If the origin-only flag is true, then: - if (originOnly) { - // 1. Set url’s path to « the empty string ». - url.pathname = '' + const { start, size, end = size } = contentRange - // 2. Set url’s query to null. - url.search = '' - } + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') - // 7. Return url. - return url -} + this.resume = resume + return true + } -function isURLPotentiallyTrustworthy (url) { - if (!(url instanceof URL)) { - return false - } + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) - // If child of about, return true - if (url.href === 'about:blank' || url.href === 'about:srcdoc') { - return true - } + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } - // If scheme is data, return true - if (url.protocol === 'data:') return true + const { start, size, end = size } = range - // If file, return true - if (url.protocol === 'file:') return true + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) - return isOriginPotentiallyTrustworthy(url.origin) + this.start = start + this.end = end + } - function isOriginPotentiallyTrustworthy (origin) { - // If origin is explicitly null, return false - if (origin == null || origin === 'null') return false + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } - const originAsURL = new URL(origin) + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) - // If secure, return true - if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { - return true - } + this.resume = resume + this.etag = headers.etag != null ? 
headers.etag : null - // If localhost or variants, return true - if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || - (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || - (originAsURL.hostname.endsWith('.localhost'))) { - return true + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) } - // If any other, return false - return false - } -} + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) -/** - * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist - * @param {Uint8Array} bytes - * @param {string} metadataList - */ -function bytesMatch (bytes, metadataList) { - // If node is not built with OpenSSL support, we cannot check - // a request's integrity, so allow it by default (the spec will - // allow requests if an invalid hash is given, as precedence). - /* istanbul ignore if: only if node is built with --without-ssl */ - if (crypto === undefined) { - return true + this.abort(err) + + return false } - // 1. Let parsedMetadata be the result of parsing metadataList. - const parsedMetadata = parseMetadata(metadataList) + onData (chunk) { + this.start += chunk.length - // 2. If parsedMetadata is no metadata, return true. - if (parsedMetadata === 'no metadata') { - return true + return this.handler.onData(chunk) } - // 3. If parsedMetadata is the empty set, return true. - if (parsedMetadata.length === 0) { - return true + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) } - // 4. Let metadata be the result of getting the strongest - // metadata from parsedMetadata. - const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - // 5. For each item in metadata: - for (const item of metadata) { - // 1. Let algorithm be the alg component of item. - const algorithm = item.algo + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) - // 2. Let expectedValue be the val component of item. - let expectedValue = item.hash + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } - // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e - // "be liberal with padding". This is annoying, and it's not even in the spec. + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? ''}` + } + } + } - if (expectedValue.endsWith('==')) { - expectedValue = expectedValue.slice(0, -2) + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } } + } +} - // 3. Let actualValue be the result of applying algorithm to bytes. - let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') +module.exports = RetryHandler - if (actualValue.endsWith('==')) { - actualValue = actualValue.slice(0, -2) - } - // 4. 
If actualValue is a case-sensitive match for expectedValue, - // return true. - if (actualValue === expectedValue) { - return true - } +/***/ }), - let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') +/***/ 38861: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (actualBase64URL.endsWith('==')) { - actualBase64URL = actualBase64URL.slice(0, -2) - } +"use strict"; - if (actualBase64URL === expectedValue) { - return true + +const RedirectHandler = __nccwpck_require__(72860) + +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts + + if (!maxRedirections) { + return dispatch(opts, handler) + } + + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) } } - - // 6. Return false. - return false } -// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options -// https://www.w3.org/TR/CSP2/#source-list-syntax -// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +module.exports = createRedirectInterceptor -/** - * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - * @param {string} metadata - */ -function parseMetadata (metadata) { - // 1. Let result be the empty set. - /** @type {{ algo: string, hash: string }[]} */ - const result = [] - // 2. Let empty be equal to true. - let empty = true +/***/ }), + +/***/ 30953: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const supportedHashes = crypto.getHashes() +"use strict"; - // 3. For each token returned by splitting metadata on spaces: - for (const token of metadata.split(' ')) { - // 1. Set empty to false. 
- empty = false +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = __nccwpck_require__(41891); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; + ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; +})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + 
METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ + METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? 
+ METHODS.SOURCE,
+];
+exports.METHODS_ICE = [
+ METHODS.SOURCE,
+];
+exports.METHODS_RTSP = [
+ METHODS.OPTIONS,
+ METHODS.DESCRIBE,
+ METHODS.ANNOUNCE,
+ METHODS.SETUP,
+ METHODS.PLAY,
+ METHODS.PAUSE,
+ METHODS.TEARDOWN,
+ METHODS.GET_PARAMETER,
+ METHODS.SET_PARAMETER,
+ METHODS.REDIRECT,
+ METHODS.RECORD,
+ METHODS.FLUSH,
+ // For AirPlay
+ METHODS.GET,
+ METHODS.POST,
+];
+exports.METHOD_MAP = utils_1.enumToMap(METHODS);
+exports.H_METHOD_MAP = {};
+Object.keys(exports.METHOD_MAP).forEach((key) => {
+ if (/^H/.test(key)) {
+ exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
+ }
+});
+var FINISH;
+(function (FINISH) {
+ FINISH[FINISH["SAFE"] = 0] = "SAFE";
+ FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
+ FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
+})(FINISH = exports.FINISH || (exports.FINISH = {}));
+exports.ALPHA = [];
+for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
+ // Upper case
+ exports.ALPHA.push(String.fromCharCode(i));
+ // Lower case
+ exports.ALPHA.push(String.fromCharCode(i + 0x20));
+}
+exports.NUM_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+};
+exports.HEX_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+ A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
+ a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
+};
+exports.NUM = [
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+];
+exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
+exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
+exports.USERINFO_CHARS = exports.ALPHANUM
+ .concat(exports.MARK)
+ .concat(['%', ';', ':', '&', '=', '+', '$', ',']);
+// TODO(indutny): use RFC
+exports.STRICT_URL_CHAR = [
+ '!', '"', '$', '%', '&', '\'',
+ '(', ')', '*', '+', ',', '-', '.', '/',
+ ':', ';', '<', '=', '>',
+ '@', '[', '\\', ']', '^', '_',
+ '`',
+ '{', '|', '}', '~',
+].concat(exports.ALPHANUM);
+exports.URL_CHAR = exports.STRICT_URL_CHAR
+ .concat(['\t', '\f']);
+// All characters with 0x80 bit set to 1
+for (let i = 0x80; i <= 0xff; i++) {
+ exports.URL_CHAR.push(i);
+}
+exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
+/* Tokens as defined by rfc 2616. Also lowercases them.
+ * token = 1*<any CHAR except CTLs or separators>
+ * separators = "(" | ")" | "<" | ">" | "@"
+ * | "," | ";" | ":" | "\" | <">
+ * | "/" | "[" | "]" | "?"
| "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map - // 2. Parse token as a hash-with-options. - const parsedToken = parseHashWithOptions.exec(token) +/***/ }), - // 3. If token does not parse, continue to the next token. - if (parsedToken === null || parsedToken.groups === undefined) { - // Note: Chromium blocks the request at this point, but Firefox - // gives a warning that an invalid integrity was given. The - // correct behavior is to ignore these, and subsequently not - // check the integrity of the resource. - continue - } +/***/ 61145: +/***/ ((module) => { - // 4. Let algorithm be the hash-algo component of token. 
- const algorithm = parsedToken.groups.algo +module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8L
QaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEE
BRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDA
oLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQ
QFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCv
gICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASA
CRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2A
ILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBI
RAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyAC
IARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEE
ANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgI
CAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNA
SABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAM
RAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0H
U0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0
YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiB
SgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB
/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnN
mZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0
VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' - // 5. If algorithm is a hash function recognized by the user - // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm.toLowerCase())) { - result.push(parsedToken.groups) - } - } - // 4. Return no metadata if empty is true, otherwise return result. 
- if (empty === true) { - return 'no metadata' - } +/***/ }), - return result -} +/***/ 95627: +/***/ ((module) => { -// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request -function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { - // TODO -} +module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0G
NqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAy
AALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMH
gtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0A
IABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0
AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6Ad
UB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBI
RAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2
AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRI
hEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAAC
IEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQD
MMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CA
gAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIAB
BHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBAD
YCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEI
AZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMg
BXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIAN
GG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl
9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJT
lVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB
AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' -/** - * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} - * @param {URL} A - * @param {URL} B - */ -function sameOrigin (A, B) { - // 1. If A and B are the same opaque origin, then return true. - if (A.origin === B.origin && A.origin === 'null') { - return true - } - // 2. If A and B are both tuple origins and their schemes, - // hosts, and port are identical, then return true. 
- if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { - return true - } +/***/ }), - // 3. Return false. - return false -} +/***/ 41891: +/***/ ((__unused_webpack_module, exports) => { -function createDeferredPromise () { - let res - let rej - const promise = new Promise((resolve, reject) => { - res = resolve - rej = reject - }) +"use strict"; - return { promise, resolve: res, reject: rej } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; } +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map -function isAborted (fetchParams) { - return fetchParams.controller.state === 'aborted' -} +/***/ }), -function isCancelled (fetchParams) { - return fetchParams.controller.state === 'aborted' || - fetchParams.controller.state === 'terminated' -} +/***/ 66771: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const normalizeMethodRecord = { - delete: 'DELETE', - DELETE: 'DELETE', - get: 'GET', - GET: 'GET', - head: 'HEAD', - HEAD: 'HEAD', - options: 'OPTIONS', - OPTIONS: 'OPTIONS', - post: 'POST', - POST: 'POST', - put: 'PUT', - PUT: 'PUT' -} +"use strict"; -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. -Object.setPrototypeOf(normalizeMethodRecord, null) -/** - * @see https://fetch.spec.whatwg.org/#concept-method-normalize - * @param {string} method - */ -function normalizeMethod (method) { - return normalizeMethodRecord[method.toLowerCase()] ?? method -} +const { kClients } = __nccwpck_require__(72785) +const Agent = __nccwpck_require__(7890) +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = __nccwpck_require__(24347) +const MockClient = __nccwpck_require__(58687) +const MockPool = __nccwpck_require__(26193) +const { matchValue, buildMockOptions } = __nccwpck_require__(79323) +const { InvalidArgumentError, UndiciError } = __nccwpck_require__(48045) +const Dispatcher = __nccwpck_require__(60412) +const Pluralizer = __nccwpck_require__(78891) +const PendingInterceptorsFormatter = __nccwpck_require__(86823) -// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string -function serializeJavascriptValueToJSONString (value) { - // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). - const result = JSON.stringify(value) +class FakeWeakRef { + constructor (value) { + this.value = value + } - // 2. If result is undefined, then throw a TypeError. - if (result === undefined) { - throw new TypeError('Value is not JSON serializable') + deref () { + return this.value } +} - // 3. Assert: result is a string. - assert(typeof result === 'string') +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) - // 4. Return result. - return result -} + this[kNetConnect] = true + this[kIsMockActive] = true -// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object -const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? 
opts.agent : new Agent(opts) + this[kAgent] = agent -/** - * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object - * @param {() => unknown[]} iterator - * @param {string} name name of the instance - * @param {'key'|'value'|'key+value'} kind - */ -function makeIterator (iterator, name, kind) { - const object = { - index: 0, - kind, - target: iterator + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) } - const i = { - next () { - // 1. Let interface be the interface for which the iterator prototype object exists. - - // 2. Let thisValue be the this value. + get (origin) { + let dispatcher = this[kMockAgentGet](origin) - // 3. Let object be ? ToObject(thisValue). + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } - // 4. If object is a platform object, then perform a security - // check, passing: + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } - // 5. If object is not a default iterator object for interface, - // then throw a TypeError. - if (Object.getPrototypeOf(this) !== i) { - throw new TypeError( - `'next' called on an object that does not implement interface ${name} Iterator.` - ) - } + async close () { + await this[kAgent].close() + this[kClients].clear() + } - // 6. Let index be object’s index. - // 7. Let kind be object’s kind. - // 8. Let values be object’s target's value pairs to iterate over. - const { index, kind, target } = object - const values = target() + deactivate () { + this[kIsMockActive] = false + } - // 9. Let len be the length of values. - const len = values.length + activate () { + this[kIsMockActive] = true + } - // 10. If index is greater than or equal to len, then return - // CreateIterResultObject(undefined, true). - if (index >= len) { - return { value: undefined, done: true } + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') + } + } - // 11. Let pair be the entry in values at index index. - const pair = values[index] - - // 12. Set object’s index to index + 1. - object.index = index + 1 + disableNetConnect () { + this[kNetConnect] = false + } - // 13. Return the iterator result for pair and kind. - return iteratorResult(pair, kind) - }, - // The class string of an iterator prototype object for a given interface is the - // result of concatenating the identifier of the interface and the string " Iterator". - [Symbol.toStringTag]: `${name} Iterator` + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] } - // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. - Object.setPrototypeOf(i, esIteratorPrototype) - // esIteratorPrototype needs to be the prototype of i - // which is the prototype of an empty object. Yes, it's confusing. 
- return Object.setPrototypeOf({}, i) -} + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } -// https://webidl.spec.whatwg.org/#iterator-result -function iteratorResult (pair, kind) { - let result + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) + } - // 1. Let result be a value determined by the value of kind: - switch (kind) { - case 'key': { - // 1. Let idlKey be pair’s key. - // 2. Let key be the result of converting idlKey to an - // ECMAScript value. - // 3. result is key. - result = pair[0] - break + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() } - case 'value': { - // 1. Let idlValue be pair’s value. - // 2. Let value be the result of converting idlValue to - // an ECMAScript value. - // 3. result is value. - result = pair[1] - break + + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher } - case 'key+value': { - // 1. Let idlKey be pair’s key. - // 2. Let idlValue be pair’s value. - // 3. Let key be the result of converting idlKey to an - // ECMAScript value. - // 4. Let value be the result of converting idlValue to - // an ECMAScript value. - // 5. Let array be ! ArrayCreate(2). - // 6. Call ! CreateDataProperty(array, "0", key). - // 7. Call ! CreateDataProperty(array, "1", value). - // 8. result is array. - result = pair - break + + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } } } - // 2. Return CreateIterResultObject(result, false). - return { value: result, done: false } -} - -/** - * @see https://fetch.spec.whatwg.org/#body-fully-read - */ -async function fullyReadBody (body, processBody, processBodyError) { - // 1. If taskDestination is null, then set taskDestination to - // the result of starting a new parallel queue. + [kGetNetConnect] () { + return this[kNetConnect] + } - // 2. Let successSteps given a byte sequence bytes be to queue a - // fetch task to run processBody given bytes, with taskDestination. - const successSteps = processBody + pendingInterceptors () { + const mockAgentClients = this[kClients] - // 3. Let errorSteps be to queue a fetch task to run processBodyError, - // with taskDestination. - const errorSteps = processBodyError + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) + } - // 4. Let reader be the result of getting a reader for body’s stream. - // If that threw an exception, then run errorSteps with that - // exception and return. 
- let reader + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() - try { - reader = body.stream.getReader() - } catch (e) { - errorSteps(e) - return - } + if (pending.length === 0) { + return + } - // 5. Read all bytes from reader, given successSteps and errorSteps. - try { - const result = await readAllBytes(reader) - successSteps(result) - } catch (e) { - errorSteps(e) - } -} + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) -/** @type {ReadableStream} */ -let ReadableStream = globalThis.ReadableStream + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: -function isReadableStreamLike (stream) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) +${pendingInterceptorsFormatter.format(pending)} +`.trim()) } - - return stream instanceof ReadableStream || ( - stream[Symbol.toStringTag] === 'ReadableStream' && - typeof stream.tee === 'function' - ) } -const MAXIMUM_ARGUMENT_LENGTH = 65535 +module.exports = MockAgent -/** - * @see https://infra.spec.whatwg.org/#isomorphic-decode - * @param {number[]|Uint8Array} input - */ -function isomorphicDecode (input) { - // 1. To isomorphic decode a byte sequence input, return a string whose code point - // length is equal to input’s length and whose code points have the same values - // as the values of input’s bytes, in the same order. - if (input.length < MAXIMUM_ARGUMENT_LENGTH) { - return String.fromCharCode(...input) - } +/***/ }), - return input.reduce((previous, current) => previous + String.fromCharCode(current), '') -} +/***/ 58687: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * @param {ReadableStreamController} controller - */ -function readableStreamClose (controller) { - try { - controller.close() - } catch (err) { - // TODO: add comment explaining why this error occurs. - if (!err.message.includes('Controller is already closed')) { - throw err - } - } -} +"use strict"; -/** - * @see https://infra.spec.whatwg.org/#isomorphic-encode - * @param {string} input - */ -function isomorphicEncode (input) { - // 1. Assert: input contains no code points greater than U+00FF. - for (let i = 0; i < input.length; i++) { - assert(input.charCodeAt(i) <= 0xFF) - } - // 2. Return a byte sequence whose length is equal to input’s code - // point length and whose bytes have the same values as the - // values of input’s code points, in the same order - return input -} +const { promisify } = __nccwpck_require__(73837) +const Client = __nccwpck_require__(33598) +const { buildMockDispatch } = __nccwpck_require__(79323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(24347) +const { MockInterceptor } = __nccwpck_require__(90410) +const Symbols = __nccwpck_require__(72785) +const { InvalidArgumentError } = __nccwpck_require__(48045) /** - * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes - * @see https://streams.spec.whatwg.org/#read-loop - * @param {ReadableStreamDefaultReader} reader + * MockClient provides an API that extends the Client to influence the mockDispatches. 
*/ -async function readAllBytes (reader) { - const bytes = [] - let byteLength = 0 - - while (true) { - const { done, value: chunk } = await reader.read() +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) - if (done) { - // 1. Call successSteps with bytes. - return Buffer.concat(bytes, byteLength) + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') } - // 1. If chunk is not a Uint8Array object, call failureSteps - // with a TypeError and abort these steps. - if (!isUint8Array(chunk)) { - throw new TypeError('Received non-Uint8Array chunk') - } + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) - // 2. Append the bytes represented by chunk to bytes. - bytes.push(chunk) - byteLength += chunk.length + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } - // 3. Read-loop given reader, bytes, successSteps, and failureSteps. + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. + */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } } -/** - * @see https://fetch.spec.whatwg.org/#is-local - * @param {URL} url - */ -function urlIsLocal (url) { - assert('protocol' in url) // ensure it's a url object +module.exports = MockClient - const protocol = url.protocol - return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' -} +/***/ }), -/** - * @param {string|URL} url - */ -function urlHasHttpsScheme (url) { - if (typeof url === 'string') { - return url.startsWith('https:') - } +/***/ 50888: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return url.protocol === 'https:' -} +"use strict"; -/** - * @see https://fetch.spec.whatwg.org/#http-scheme - * @param {URL} url - */ -function urlIsHttpHttpsScheme (url) { - assert('protocol' in url) // ensure it's a url object - const protocol = url.protocol +const { UndiciError } = __nccwpck_require__(48045) - return protocol === 'http:' || protocol === 'https:' +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' + } } -/** - * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
- */ -const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) - module.exports = { - isAborted, - isCancelled, - createDeferredPromise, - ReadableStreamFrom, - toUSVString, - tryUpgradeRequestToAPotentiallyTrustworthyURL, - coarsenedSharedCurrentTime, - determineRequestsReferrer, - makePolicyContainer, - clonePolicyContainer, - appendFetchMetadata, - appendRequestOriginHeader, - TAOCheck, - corsCheck, - crossOriginResourcePolicyCheck, - createOpaqueTimingInfo, - setRequestReferrerPolicyOnRedirect, - isValidHTTPToken, - requestBadPort, - requestCurrentURL, - responseURL, - responseLocationURL, - isBlobLike, - isURLPotentiallyTrustworthy, - isValidReasonPhrase, - sameOrigin, - normalizeMethod, - serializeJavascriptValueToJSONString, - makeIterator, - isValidHeaderName, - isValidHeaderValue, - hasOwn, - isErrorLike, - fullyReadBody, - bytesMatch, - isReadableStreamLike, - readableStreamClose, - isomorphicEncode, - isomorphicDecode, - urlIsLocal, - urlHasHttpsScheme, - urlIsHttpHttpsScheme, - readAllBytes, - normalizeMethodRecord + MockNotMatchedError } /***/ }), -/***/ 1744: +/***/ 90410: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { types } = __nccwpck_require__(3837) -const { hasOwn, toUSVString } = __nccwpck_require__(2538) - -/** @type {import('../../types/webidl').Webidl} */ -const webidl = {} -webidl.converters = {} -webidl.util = {} -webidl.errors = {} - -webidl.errors.exception = function (message) { - return new TypeError(`${message.header}: ${message.message}`) -} - -webidl.errors.conversionFailed = function (context) { - const plural = context.types.length === 1 ? '' : ' one of' - const message = - `${context.argument} could not be converted to` + - `${plural}: ${context.types.join(', ')}.` +const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(79323) +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = __nccwpck_require__(24347) +const { InvalidArgumentError } = __nccwpck_require__(48045) +const { buildURL } = __nccwpck_require__(83983) - return webidl.errors.exception({ - header: context.prefix, - message - }) -} +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } -webidl.errors.invalidArgument = function (context) { - return webidl.errors.exception({ - header: context.prefix, - message: `"${context.value}" is an invalid ${context.type}.` - }) -} + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') + } -// https://webidl.spec.whatwg.org/#implements -webidl.brandCheck = function (V, I, opts = undefined) { - if (opts?.strict !== false && !(V instanceof I)) { - throw new TypeError('Illegal invocation') - } else { - return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] + this[kMockDispatch].delay = waitInMs + return this } -} -webidl.argumentLengthCheck = function ({ length }, min, ctx) { - if (length < min) { - throw webidl.errors.exception({ - message: `${min} argument${min !== 1 ? 's' : ''} required, ` + - `but${length ? ' only' : ''} ${length} found.`, - ...ctx - }) + /** + * For a defined reply, never mark as consumed. 
+ */ + persist () { + this[kMockDispatch].persist = true + return this } -} -webidl.illegalConstructor = function () { - throw webidl.errors.exception({ - header: 'TypeError', - message: 'Illegal constructor' - }) + /** + * Allow one to define a reply for a set amount of matching requests. + */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } + + this[kMockDispatch].times = repeatTimes + return this + } } -// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values -webidl.util.Type = function (V) { - switch (typeof V) { - case 'undefined': return 'Undefined' - case 'boolean': return 'Boolean' - case 'string': return 'String' - case 'symbol': return 'Symbol' - case 'number': return 'Number' - case 'bigint': return 'BigInt' - case 'function': - case 'object': { - if (V === null) { - return 'Null' +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search } - - return 'Object' } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } + + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false } -} -// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint -webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { - let upperBound - let lowerBound + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } - // 1. If bitLength is 64, then: - if (bitLength === 64) { - // 1. Let upperBound be 2^53 − 1. - upperBound = Math.pow(2, 53) - 1 + return { statusCode, data, headers, trailers } + } - // 2. If signedness is "unsigned", then let lowerBound be 0. - if (signedness === 'unsigned') { - lowerBound = 0 - } else { - // 3. Otherwise let lowerBound be −2^53 + 1. - lowerBound = Math.pow(-2, 53) + 1 + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') } - } else if (signedness === 'unsigned') { - // 2. Otherwise, if signedness is "unsigned", then: + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } - // 1. 
Let lowerBound be 0. - lowerBound = 0 + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. + const resolvedData = replyData(opts) - // 2. Let upperBound be 2^bitLength − 1. - upperBound = Math.pow(2, bitLength) - 1 - } else { - // 3. Otherwise: + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } - // 1. Let lowerBound be -2^bitLength − 1. - lowerBound = Math.pow(-2, bitLength) - 1 + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } - // 2. Let upperBound be 2^bitLength − 1 − 1. - upperBound = Math.pow(2, bitLength - 1) - 1 - } + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } - // 4. Let x be ? ToNumber(V). - let x = Number(V) + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) - // 5. If x is −0, then set x to +0. - if (x === 0) { - x = 0 + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) } - // 6. If the conversion is to an IDL type associated - // with the [EnforceRange] extended attribute, then: - if (opts.enforceRange === true) { - // 1. If x is NaN, +∞, or −∞, then throw a TypeError. - if ( - Number.isNaN(x) || - x === Number.POSITIVE_INFINITY || - x === Number.NEGATIVE_INFINITY - ) { - throw webidl.errors.exception({ - header: 'Integer conversion', - message: `Could not convert ${V} to an integer.` - }) + /** + * Mock an undici request with a defined error. + */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') } - // 2. Set x to IntegerPart(x). - x = webidl.util.IntegerPart(x) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) + } - // 3. If x < lowerBound or x > upperBound, then - // throw a TypeError. 
- if (x < lowerBound || x > upperBound) { - throw webidl.errors.exception({ - header: 'Integer conversion', - message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` - }) + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') } - // 4. Return x. - return x + this[kDefaultHeaders] = headers + return this } - // 7. If x is not NaN and the conversion is to an IDL - // type associated with the [Clamp] extended - // attribute, then: - if (!Number.isNaN(x) && opts.clamp === true) { - // 1. Set x to min(max(x, lowerBound), upperBound). - x = Math.min(Math.max(x, lowerBound), upperBound) - - // 2. Round x to the nearest integer, choosing the - // even integer if it lies halfway between two, - // and choosing +0 rather than −0. - if (Math.floor(x) % 2 === 0) { - x = Math.floor(x) - } else { - x = Math.ceil(x) + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') } - // 3. Return x. - return x + this[kDefaultTrailers] = trailers + return this } - // 8. If x is NaN, +0, +∞, or −∞, then return +0. - if ( - Number.isNaN(x) || - (x === 0 && Object.is(0, x)) || - x === Number.POSITIVE_INFINITY || - x === Number.NEGATIVE_INFINITY - ) { - return 0 + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this } +} - // 9. Set x to IntegerPart(x). - x = webidl.util.IntegerPart(x) - - // 10. Set x to x modulo 2^bitLength. - x = x % Math.pow(2, bitLength) +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope - // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, - // then return x − 2^bitLength. - if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { - return x - Math.pow(2, bitLength) - } - // 12. Otherwise, return x. - return x -} +/***/ }), -// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart -webidl.util.IntegerPart = function (n) { - // 1. Let r be floor(abs(n)). - const r = Math.floor(Math.abs(n)) +/***/ 26193: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 2. If n < 0, then return -1 × r. - if (n < 0) { - return -1 * r - } +"use strict"; - // 3. Otherwise, return r. - return r -} -// https://webidl.spec.whatwg.org/#es-sequence -webidl.sequenceConverter = function (converter) { - return (V) => { - // 1. If Type(V) is not Object, throw a TypeError. - if (webidl.util.Type(V) !== 'Object') { - throw webidl.errors.exception({ - header: 'Sequence', - message: `Value of type ${webidl.util.Type(V)} is not an Object.` - }) - } +const { promisify } = __nccwpck_require__(73837) +const Pool = __nccwpck_require__(4634) +const { buildMockDispatch } = __nccwpck_require__(79323) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(24347) +const { MockInterceptor } = __nccwpck_require__(90410) +const Symbols = __nccwpck_require__(72785) +const { InvalidArgumentError } = __nccwpck_require__(48045) - // 2. Let method be ? GetMethod(V, @@iterator). - /** @type {Generator} */ - const method = V?.[Symbol.iterator]?.() - const seq = [] +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. 
+ */ +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) - // 3. If method is undefined, throw a TypeError. - if ( - method === undefined || - typeof method.next !== 'function' - ) { - throw webidl.errors.exception({ - header: 'Sequence', - message: 'Object is not an iterator.' - }) + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') } - // https://webidl.spec.whatwg.org/#create-sequence-from-iterable - while (true) { - const { done, value } = method.next() + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) - if (done) { - break - } + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } - seq.push(converter(value)) - } + get [Symbols.kConnected] () { + return this[kConnected] + } - return seq + /** + * Sets up the base interceptor for mocking replies from undici. + */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } } -// https://webidl.spec.whatwg.org/#es-to-record -webidl.recordConverter = function (keyConverter, valueConverter) { - return (O) => { - // 1. If Type(O) is not Object, throw a TypeError. - if (webidl.util.Type(O) !== 'Object') { - throw webidl.errors.exception({ - header: 'Record', - message: `Value of type ${webidl.util.Type(O)} is not an Object.` - }) - } +module.exports = MockPool - // 2. Let result be a new empty instance of record. - const result = {} - if (!types.isProxy(O)) { - // Object.keys only returns enumerable properties - const keys = Object.keys(O) +/***/ }), + +/***/ 24347: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') +} - for (const key of keys) { - // 1. Let typedKey be key converted to an IDL value of type K. - const typedKey = keyConverter(key) - // 2. Let value be ? Get(O, key). - // 3. Let typedValue be value converted to an IDL value of type V. - const typedValue = valueConverter(O[key]) +/***/ }), - // 4. Set result[typedKey] to typedValue. - result[typedKey] = typedValue - } +/***/ 79323: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 5. Return result. - return result - } +"use strict"; - // 3. Let keys be ? O.[[OwnPropertyKeys]](). - const keys = Reflect.ownKeys(O) - // 4. For each key of keys. - for (const key of keys) { - // 1. Let desc be ? O.[[GetOwnProperty]](key). 
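The MockPool added here is normally obtained through a MockAgent rather than constructed directly; a small usage sketch inside an async test (origin and route are illustrative):

    const { MockAgent, setGlobalDispatcher, request } = require('undici')

    const mockAgent = new MockAgent()
    mockAgent.disableNetConnect()                 // unmatched requests now raise MockNotMatchedError
    setGlobalDispatcher(mockAgent)

    const mockPool = mockAgent.get('https://api.example.test')   // MockPool for that origin
    mockPool.intercept({ path: '/users/1', method: 'GET' }).reply(200, { id: 1 })

    const { statusCode, body } = await request('https://api.example.test/users/1')
    console.log(statusCode, await body.json())    // 200 { id: 1 }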
- const desc = Reflect.getOwnPropertyDescriptor(O, key) +const { MockNotMatchedError } = __nccwpck_require__(50888) +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = __nccwpck_require__(24347) +const { buildURL, nop } = __nccwpck_require__(83983) +const { STATUS_CODES } = __nccwpck_require__(13685) +const { + types: { + isPromise + } +} = __nccwpck_require__(73837) - // 2. If desc is not undefined and desc.[[Enumerable]] is true: - if (desc?.enumerable) { - // 1. Let typedKey be key converted to an IDL value of type K. - const typedKey = keyConverter(key) +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} - // 2. Let value be ? Get(O, key). - // 3. Let typedValue be value converted to an IDL value of type V. - const typedValue = valueConverter(O[key]) +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} - // 4. Set result[typedKey] to typedValue. - result[typedKey] = typedValue +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] } } - // 5. Return result. - return result + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] } } -webidl.interfaceConverter = function (i) { - return (V, opts = {}) => { - if (opts.strict !== false && !(V instanceof i)) { - throw webidl.errors.exception({ - header: i.name, - message: `Expected ${V} to be an instance of ${i.name}.` - }) - } - - return V +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) } + return Object.fromEntries(entries) } -webidl.dictionaryConverter = function (converters) { - return (dictionary) => { - const type = webidl.util.Type(dictionary) - const dict = {} - - if (type === 'Null' || type === 'Undefined') { - return dict - } else if (type !== 'Object') { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` - }) +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false + } - for (const options of converters) { - const { key, defaultValue, required, converter } = options + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) - if (required === true) { - if (!hasOwn(dictionary, key)) { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `Missing required key "${key}".` - }) - } - } + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } + } + return true +} - let value = dictionary[key] - const hasDefault = hasOwn(options, 'defaultValue') +function safeUrl (path) { + if (typeof path !== 'string') { + return path + } - // Only use defaultValue if value is undefined and - // a defaultValue options was provided. - if (hasDefault && value !== null) { - value = value ?? defaultValue - } + const pathSegments = path.split('?') - // A key can be optional and have no default value. - // When this happens, do not perform a conversion, - // and do not assign the key a value. - if (required || hasDefault || value !== undefined) { - value = converter(value) + if (pathSegments.length !== 2) { + return path + } - if ( - options.allowedValues && - !options.allowedValues.includes(value) - ) { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` - }) - } + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') +} - dict[key] = value - } - } +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} - return dict +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() } } -webidl.nullableConverter = function (converter) { - return (V) => { - if (V === null) { - return V - } +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath - return converter(V) + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) } -} -// https://webidl.spec.whatwg.org/#es-DOMString -webidl.converters.DOMString = function (V, opts = {}) { - // 1. If V is null and the conversion is to an IDL type - // associated with the [LegacyNullToEmptyString] - // extended attribute, then return the DOMString value - // that represents the empty string. 
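matchValue() above is what lets intercept() accept plain strings, regular expressions, or predicate functions for path, method, body and header values, and getMockDispatch()/safeUrl() fold query parameters into the match key; a sketch under those assumptions (origin and routes are illustrative):

    const { MockAgent } = require('undici')
    const mockPool = new MockAgent().get('https://api.example.test')

    mockPool.intercept({
      path: /^\/orders\/\d+$/,                                   // RegExp: match.test(value)
      method: 'GET',                                             // exact string match
      headers: {
        authorization: (value) => value.startsWith('Bearer ')    // predicate must return true
      }
    }).reply(200, 'ok')

    // Query parameters become part of the key and safeUrl() sorts them,
    // so this matches GET /search?page=2&q=undici as well.
    mockPool.intercept({ path: '/search', method: 'GET', query: { q: 'undici', page: 2 } })
      .reply(200, [])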
- if (V === null && opts.legacyNullToEmptyString) { - return '' + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) } - // 2. Let x be ? ToString(V). - if (typeof V === 'symbol') { - throw new TypeError('Could not convert argument of type symbol to string.') + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) } - // 3. Return the IDL DOMString value that represents the - // same sequence of code units as the one the - // ECMAScript String value x represents. - return String(V) + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] } -// https://webidl.spec.whatwg.org/#es-ByteString -webidl.converters.ByteString = function (V) { - // 1. Let x be ? ToString(V). - // Note: DOMString converter perform ? ToString(V) - const x = webidl.converters.DOMString(V) +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? { callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} - // 2. If the value of any element of x is greater than - // 255, then throw a TypeError. - for (let index = 0; index < x.length; index++) { - if (x.charCodeAt(index) > 255) { - throw new TypeError( - 'Cannot convert argument to a ByteString because the character at ' + - `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` - ) +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) } - - // 3. Return an IDL ByteString value whose length is the - // length of x, and where the value of each element is - // the value of the corresponding element of x. - return x } -// https://webidl.spec.whatwg.org/#es-USVString -webidl.converters.USVString = toUSVString - -// https://webidl.spec.whatwg.org/#es-boolean -webidl.converters.boolean = function (V) { - // 1. Let x be the result of computing ToBoolean(V). - const x = Boolean(V) - - // 2. Return the IDL boolean value that is the one that represents - // the same truth value as the ECMAScript Boolean value x. - return x +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query + } } -// https://webidl.spec.whatwg.org/#es-any -webidl.converters.any = function (V) { - return V +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? 
value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) } -// https://webidl.spec.whatwg.org/#es-long-long -webidl.converters['long long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 64, "signed"). - const x = webidl.util.ConvertToInt(V, 64, 'signed') - - // 2. Return the IDL long long value that represents - // the same numeric value as x. - return x +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' } -// https://webidl.spec.whatwg.org/#es-unsigned-long-long -webidl.converters['unsigned long long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). - const x = webidl.util.ConvertToInt(V, 64, 'unsigned') - - // 2. Return the IDL unsigned long long value that - // represents the same numeric value as x. - return x +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') } -// https://webidl.spec.whatwg.org/#es-unsigned-long -webidl.converters['unsigned long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). - const x = webidl.util.ConvertToInt(V, 32, 'unsigned') +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) - // 2. Return the IDL unsigned long value that - // represents the same numeric value as x. - return x -} + mockDispatch.timesInvoked++ -// https://webidl.spec.whatwg.org/#es-unsigned-short -webidl.converters['unsigned short'] = function (V, opts) { - // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). - const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } - // 2. Return the IDL unsigned short value that represents - // the same numeric value as x. - return x -} + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch -// https://webidl.spec.whatwg.org/#idl-ArrayBuffer -webidl.converters.ArrayBuffer = function (V, opts = {}) { - // 1. If Type(V) is not Object, or V does not have an - // [[ArrayBufferData]] internal slot, then throw a - // TypeError. - // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances - // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances - if ( - webidl.util.Type(V) !== 'Object' || - !types.isAnyArrayBuffer(V) - ) { - throw webidl.errors.conversionFailed({ - prefix: `${V}`, - argument: `${V}`, - types: ['ArrayBuffer'] - }) - } + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times - // 2. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V) is true, then throw a - // TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' 
- }) + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true } - // 3. If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V) is true, then throw a - // TypeError. - // Note: resizable ArrayBuffers are currently a proposal. + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } - // 4. Return the IDL ArrayBuffer value that is a - // reference to the same object as V. - return V -} + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data -webidl.converters.TypedArray = function (V, T, opts = {}) { - // 1. Let T be the IDL type V is being converted to. + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. + body.then((newData) => handleReply(mockDispatches, newData)) + return + } - // 2. If Type(V) is not Object, or V does not have a - // [[TypedArrayName]] internal slot with a value - // equal to T’s name, then throw a TypeError. - if ( - webidl.util.Type(V) !== 'Object' || - !types.isTypedArray(V) || - V.constructor.name !== T.name - ) { - throw webidl.errors.conversionFailed({ - prefix: `${T.name}`, - argument: `${V}`, - types: [T.name] - }) - } + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) - // 3. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' - }) + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) } - // 4. If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - // Note: resizable array buffers are currently a proposal + function resume () {} - // 5. Return the IDL value of type T that is a reference - // to the same object as V. - return V + return true } -webidl.converters.DataView = function (V, opts = {}) { - // 1. If Type(V) is not Object, or V does not have a - // [[DataView]] internal slot, then throw a TypeError. - if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { - throw webidl.errors.exception({ - header: 'DataView', - message: 'Object is not a DataView.' 
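The timesInvoked/times/persist/delay fields consumed by mockDispatch() above are normally set through the MockScope helpers returned by reply(); those helpers are not part of this hunk, so the sketch below assumes undici's documented MockScope API (values are illustrative):

    const { MockAgent } = require('undici')
    const mockPool = new MockAgent().get('https://api.example.test')

    mockPool.intercept({ path: '/flaky', method: 'GET' })
      .reply(503, 'retry later')
      .times(2)        // consumed once timesInvoked >= times

    mockPool.intercept({ path: '/health', method: 'GET' })
      .reply(200, 'ok')
      .persist()       // never marked consumed
      .delay(50)       // reply is delivered through the setTimeout branch above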
- }) - } +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] - // 2. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, - // then throw a TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' - }) + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } } - - // 3. If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - // Note: resizable ArrayBuffers are currently a proposal - - // 4. Return the IDL DataView value that is a reference - // to the same object as V. - return V } -// https://webidl.spec.whatwg.org/#BufferSource -webidl.converters.BufferSource = function (V, opts = {}) { - if (types.isAnyArrayBuffer(V)) { - return webidl.converters.ArrayBuffer(V, opts) - } - - if (types.isTypedArray(V)) { - return webidl.converters.TypedArray(V, V.constructor) +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true } + return false +} - if (types.isDataView(V)) { - return webidl.converters.DataView(V, opts) +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions } - - throw new TypeError(`Could not convert ${V} to a BufferSource.`) } -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.ByteString -) - -webidl.converters['sequence>'] = webidl.sequenceConverter( - webidl.converters['sequence'] -) - -webidl.converters['record'] = webidl.recordConverter( - webidl.converters.ByteString, - webidl.converters.ByteString -) - module.exports = { - webidl + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName } /***/ }), -/***/ 4854: -/***/ ((module) => { +/***/ 86823: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const { Transform } = __nccwpck_require__(12781) +const { Console } = __nccwpck_require__(96206) + /** - * @see https://encoding.spec.whatwg.org/#concept-encoding-get - * @param {string|undefined} label + * Gets the output of `console.table(…)` as a string. 
*/ -function getEncoding (label) { - if (!label) { - return 'failure' - } +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) - // 1. Remove any leading and trailing ASCII whitespace from label. - // 2. If label is an ASCII case-insensitive match for any of the - // labels listed in the table below, then return the - // corresponding encoding; otherwise return failure. - switch (label.trim().toLowerCase()) { - case 'unicode-1-1-utf-8': - case 'unicode11utf8': - case 'unicode20utf8': - case 'utf-8': - case 'utf8': - case 'x-unicode20utf8': - return 'UTF-8' - case '866': - case 'cp866': - case 'csibm866': - case 'ibm866': - return 'IBM866' - case 'csisolatin2': - case 'iso-8859-2': - case 'iso-ir-101': - case 'iso8859-2': - case 'iso88592': - case 'iso_8859-2': - case 'iso_8859-2:1987': - case 'l2': - case 'latin2': - return 'ISO-8859-2' - case 'csisolatin3': - case 'iso-8859-3': - case 'iso-ir-109': - case 'iso8859-3': - case 'iso88593': - case 'iso_8859-3': - case 'iso_8859-3:1988': - case 'l3': - case 'latin3': - return 'ISO-8859-3' - case 'csisolatin4': - case 'iso-8859-4': - case 'iso-ir-110': - case 'iso8859-4': - case 'iso88594': - case 'iso_8859-4': - case 'iso_8859-4:1988': - case 'l4': - case 'latin4': - return 'ISO-8859-4' - case 'csisolatincyrillic': - case 'cyrillic': - case 'iso-8859-5': - case 'iso-ir-144': - case 'iso8859-5': - case 'iso88595': - case 'iso_8859-5': - case 'iso_8859-5:1988': - return 'ISO-8859-5' - case 'arabic': - case 'asmo-708': - case 'csiso88596e': - case 'csiso88596i': - case 'csisolatinarabic': - case 'ecma-114': - case 'iso-8859-6': - case 'iso-8859-6-e': - case 'iso-8859-6-i': - case 'iso-ir-127': - case 'iso8859-6': - case 'iso88596': - case 'iso_8859-6': - case 'iso_8859-6:1987': - return 'ISO-8859-6' - case 'csisolatingreek': - case 'ecma-118': - case 'elot_928': - case 'greek': - case 'greek8': - case 'iso-8859-7': - case 'iso-ir-126': - case 'iso8859-7': - case 'iso88597': - case 'iso_8859-7': - case 'iso_8859-7:1987': - case 'sun_eu_greek': - return 'ISO-8859-7' - case 'csiso88598e': - case 'csisolatinhebrew': - case 'hebrew': - case 'iso-8859-8': - case 'iso-8859-8-e': - case 'iso-ir-138': - case 'iso8859-8': - case 'iso88598': - case 'iso_8859-8': - case 'iso_8859-8:1988': - case 'visual': - return 'ISO-8859-8' - case 'csiso88598i': - case 'iso-8859-8-i': - case 'logical': - return 'ISO-8859-8-I' - case 'csisolatin6': - case 'iso-8859-10': - case 'iso-ir-157': - case 'iso8859-10': - case 'iso885910': - case 'l6': - case 'latin6': - return 'ISO-8859-10' - case 'iso-8859-13': - case 'iso8859-13': - case 'iso885913': - return 'ISO-8859-13' - case 'iso-8859-14': - case 'iso8859-14': - case 'iso885914': - return 'ISO-8859-14' - case 'csisolatin9': - case 'iso-8859-15': - case 'iso8859-15': - case 'iso885915': - case 'iso_8859-15': - case 'l9': - return 'ISO-8859-15' - case 'iso-8859-16': - return 'ISO-8859-16' - case 'cskoi8r': - case 'koi': - case 'koi8': - case 'koi8-r': - case 'koi8_r': - return 'KOI8-R' - case 'koi8-ru': - case 'koi8-u': - return 'KOI8-U' - case 'csmacintosh': - case 'mac': - case 'macintosh': - case 'x-mac-roman': - return 'macintosh' - case 'iso-8859-11': - case 'iso8859-11': - case 'iso885911': - case 'tis-620': - case 'windows-874': - return 'windows-874' - case 'cp1250': - case 'windows-1250': - case 'x-cp1250': - return 'windows-1250' - case 'cp1251': - case 'windows-1251': - case 
'x-cp1251': - return 'windows-1251' - case 'ansi_x3.4-1968': - case 'ascii': - case 'cp1252': - case 'cp819': - case 'csisolatin1': - case 'ibm819': - case 'iso-8859-1': - case 'iso-ir-100': - case 'iso8859-1': - case 'iso88591': - case 'iso_8859-1': - case 'iso_8859-1:1987': - case 'l1': - case 'latin1': - case 'us-ascii': - case 'windows-1252': - case 'x-cp1252': - return 'windows-1252' - case 'cp1253': - case 'windows-1253': - case 'x-cp1253': - return 'windows-1253' - case 'cp1254': - case 'csisolatin5': - case 'iso-8859-9': - case 'iso-ir-148': - case 'iso8859-9': - case 'iso88599': - case 'iso_8859-9': - case 'iso_8859-9:1989': - case 'l5': - case 'latin5': - case 'windows-1254': - case 'x-cp1254': - return 'windows-1254' - case 'cp1255': - case 'windows-1255': - case 'x-cp1255': - return 'windows-1255' - case 'cp1256': - case 'windows-1256': - case 'x-cp1256': - return 'windows-1256' - case 'cp1257': - case 'windows-1257': - case 'x-cp1257': - return 'windows-1257' - case 'cp1258': - case 'windows-1258': - case 'x-cp1258': - return 'windows-1258' - case 'x-mac-cyrillic': - case 'x-mac-ukrainian': - return 'x-mac-cyrillic' - case 'chinese': - case 'csgb2312': - case 'csiso58gb231280': - case 'gb2312': - case 'gb_2312': - case 'gb_2312-80': - case 'gbk': - case 'iso-ir-58': - case 'x-gbk': - return 'GBK' - case 'gb18030': - return 'gb18030' - case 'big5': - case 'big5-hkscs': - case 'cn-big5': - case 'csbig5': - case 'x-x-big5': - return 'Big5' - case 'cseucpkdfmtjapanese': - case 'euc-jp': - case 'x-euc-jp': - return 'EUC-JP' - case 'csiso2022jp': - case 'iso-2022-jp': - return 'ISO-2022-JP' - case 'csshiftjis': - case 'ms932': - case 'ms_kanji': - case 'shift-jis': - case 'shift_jis': - case 'sjis': - case 'windows-31j': - case 'x-sjis': - return 'Shift_JIS' - case 'cseuckr': - case 'csksc56011987': - case 'euc-kr': - case 'iso-ir-149': - case 'korean': - case 'ks_c_5601-1987': - case 'ks_c_5601-1989': - case 'ksc5601': - case 'ksc_5601': - case 'windows-949': - return 'EUC-KR' - case 'csiso2022kr': - case 'hz-gb-2312': - case 'iso-2022-cn': - case 'iso-2022-cn-ext': - case 'iso-2022-kr': - case 'replacement': - return 'replacement' - case 'unicodefffe': - case 'utf-16be': - return 'UTF-16BE' - case 'csunicode': - case 'iso-10646-ucs-2': - case 'ucs-2': - case 'unicode': - case 'unicodefeff': - case 'utf-16': - case 'utf-16le': - return 'UTF-16LE' - case 'x-user-defined': - return 'x-user-defined' - default: return 'failure' + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) } -} -module.exports = { - getEncoding + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : '❌', + Invocations: timesInvoked, + Remaining: persist ? 
Infinity : times - timesInvoked + })) + + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } } /***/ }), -/***/ 1446: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 78891: +/***/ ((module) => { "use strict"; -const { - staticPropertyDescriptors, - readOperation, - fireAProgressEvent -} = __nccwpck_require__(7530) -const { - kState, - kError, - kResult, - kEvents, - kAborted -} = __nccwpck_require__(9054) -const { webidl } = __nccwpck_require__(1744) -const { kEnumerableProperty } = __nccwpck_require__(3983) +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} -class FileReader extends EventTarget { - constructor () { - super() +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} - this[kState] = 'empty' - this[kResult] = null - this[kError] = null - this[kEvents] = { - loadend: null, - error: null, - abort: null, - load: null, - progress: null, - loadstart: null - } +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural } - /** - * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer - * @param {import('buffer').Blob} blob - */ - readAsArrayBuffer (blob) { - webidl.brandCheck(this, FileReader) + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } + } +} - webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) - blob = webidl.converters.Blob(blob, { strict: false }) +/***/ }), - // The readAsArrayBuffer(blob) method, when invoked, - // must initiate a read operation for blob with ArrayBuffer. - readOperation(this, blob, 'ArrayBuffer') - } +/***/ 68266: +/***/ ((module) => { - /** - * @see https://w3c.github.io/FileAPI/#readAsBinaryString - * @param {import('buffer').Blob} blob - */ - readAsBinaryString (blob) { - webidl.brandCheck(this, FileReader) +"use strict"; +/* eslint-disable */ + + + +// Extracted from node/lib/internal/fixed_queue.js + +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; + +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... 
| +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. + +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + isEmpty() { + return this.top === this.bottom; + } - blob = webidl.converters.Blob(blob, { strict: false }) + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } - // The readAsBinaryString(blob) method, when invoked, - // must initiate a read operation for blob with BinaryString. - readOperation(this, blob, 'BinaryString') + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; } - /** - * @see https://w3c.github.io/FileAPI/#readAsDataText - * @param {import('buffer').Blob} blob - * @param {string?} encoding - */ - readAsText (blob, encoding = undefined) { - webidl.brandCheck(this, FileReader) + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} - webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } - blob = webidl.converters.Blob(blob, { strict: false }) + isEmpty() { + return this.head.isEmpty(); + } - if (encoding !== undefined) { - encoding = webidl.converters.DOMString(encoding) + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. + this.head = this.head.next = new FixedCircularBuffer(); } - - // The readAsText(blob, encoding) method, when invoked, - // must initiate a read operation for blob with Text and encoding. - readOperation(this, blob, 'Text', encoding) + this.head.push(data); } - /** - * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL - * @param {import('buffer').Blob} blob - */ - readAsDataURL (blob) { - webidl.brandCheck(this, FileReader) - - webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) - - blob = webidl.converters.Blob(blob, { strict: false }) - - // The readAsDataURL(blob) method, when invoked, must - // initiate a read operation for blob with DataURL. - readOperation(this, blob, 'DataURL') + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. 
+ this.tail = tail.next; + } + return next; } +}; - /** - * @see https://w3c.github.io/FileAPI/#dfn-abort - */ - abort () { - // 1. If this's state is "empty" or if this's state is - // "done" set this's result to null and terminate - // this algorithm. - if (this[kState] === 'empty' || this[kState] === 'done') { - this[kResult] = null - return - } - // 2. If this's state is "loading" set this's state to - // "done" and set this's result to null. - if (this[kState] === 'loading') { - this[kState] = 'done' - this[kResult] = null - } +/***/ }), - // 3. If there are any tasks from this on the file reading - // task source in an affiliated task queue, then remove - // those tasks from that task queue. - this[kAborted] = true +/***/ 73198: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 4. Terminate the algorithm for the read method being processed. - // TODO +"use strict"; - // 5. Fire a progress event called abort at this. - fireAProgressEvent('abort', this) - // 6. If this's state is not "loading", fire a progress - // event called loadend at this. - if (this[kState] !== 'loading') { - fireAProgressEvent('loadend', this) - } - } +const DispatcherBase = __nccwpck_require__(74839) +const FixedQueue = __nccwpck_require__(68266) +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(72785) +const PoolStats = __nccwpck_require__(39689) - /** - * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate - */ - get readyState () { - webidl.brandCheck(this, FileReader) +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') - switch (this[kState]) { - case 'empty': return this.EMPTY - case 'loading': return this.LOADING - case 'done': return this.DONE - } - } +class PoolBase extends DispatcherBase { + constructor () { + super() - /** - * @see https://w3c.github.io/FileAPI/#dom-filereader-result - */ - get result () { - webidl.brandCheck(this, FileReader) + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 - // The result attribute’s getter, when invoked, must return - // this's result. - return this[kResult] - } + const pool = this - /** - * @see https://w3c.github.io/FileAPI/#dom-filereader-error - */ - get error () { - webidl.brandCheck(this, FileReader) + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] - // The error attribute’s getter, when invoked, must return - // this's error. 
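The PendingInterceptorsFormatter and Pluralizer added a little earlier back MockAgent's diagnostics for interceptors that were registered but never fully consumed; typical teardown usage, assuming the mockAgent from the earlier sketch:

    // Array of interceptors that are still pending.
    console.log(mockAgent.pendingInterceptors())

    // Throws with the console.table()-style report produced by
    // PendingInterceptorsFormatter when anything is still pending.
    mockAgent.assertNoPendingInterceptors()

    await mockAgent.close()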
- return this[kError] - } + let needDrain = false - get onloadend () { - webidl.brandCheck(this, FileReader) + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } - return this[kEvents].loadend - } + this[kNeedDrain] = needDrain - set onloadend (fn) { - webidl.brandCheck(this, FileReader) + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } - if (this[kEvents].loadend) { - this.removeEventListener('loadend', this[kEvents].loadend) + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } } - if (typeof fn === 'function') { - this[kEvents].loadend = fn - this.addEventListener('loadend', fn) - } else { - this[kEvents].loadend = null + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) } - } - - get onerror () { - webidl.brandCheck(this, FileReader) - - return this[kEvents].error - } - - set onerror (fn) { - webidl.brandCheck(this, FileReader) - if (this[kEvents].error) { - this.removeEventListener('error', this[kEvents].error) + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) } - if (typeof fn === 'function') { - this[kEvents].error = fn - this.addEventListener('error', fn) - } else { - this[kEvents].error = null + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) } + + this[kStats] = new PoolStats(this) } - get onloadstart () { - webidl.brandCheck(this, FileReader) + get [kBusy] () { + return this[kNeedDrain] + } - return this[kEvents].loadstart + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length } - set onloadstart (fn) { - webidl.brandCheck(this, FileReader) + get [kFree] () { + return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length + } - if (this[kEvents].loadstart) { - this.removeEventListener('loadstart', this[kEvents].loadstart) + get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending } + return ret + } - if (typeof fn === 'function') { - this[kEvents].loadstart = fn - this.addEventListener('loadstart', fn) - } else { - this[kEvents].loadstart = null + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running } + return ret } - get onprogress () { - webidl.brandCheck(this, FileReader) - - return this[kEvents].progress + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size + } + return ret } - set onprogress (fn) { - webidl.brandCheck(this, FileReader) - - if (this[kEvents].progress) { - this.removeEventListener('progress', this[kEvents].progress) - } + get stats () { + return this[kStats] + } - if (typeof fn === 'function') { - this[kEvents].progress = fn - this.addEventListener('progress', fn) + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) } else { - this[kEvents].progress = null + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) } } - get onload () { - webidl.brandCheck(this, FileReader) + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } - return 
this[kEvents].load + return Promise.all(this[kClients].map(c => c.destroy(err))) } - set onload (fn) { - webidl.brandCheck(this, FileReader) + [kDispatch] (opts, handler) { + const dispatcher = this[kGetDispatcher]() - if (this[kEvents].load) { - this.removeEventListener('load', this[kEvents].load) + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() } - if (typeof fn === 'function') { - this[kEvents].load = fn - this.addEventListener('load', fn) - } else { - this[kEvents].load = null - } + return !this[kNeedDrain] } - get onabort () { - webidl.brandCheck(this, FileReader) - - return this[kEvents].abort - } + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) - set onabort (fn) { - webidl.brandCheck(this, FileReader) + this[kClients].push(client) - if (this[kEvents].abort) { - this.removeEventListener('abort', this[kEvents].abort) + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) + } + }) } - if (typeof fn === 'function') { - this[kEvents].abort = fn - this.addEventListener('abort', fn) - } else { - this[kEvents].abort = null - } + return this } -} -// https://w3c.github.io/FileAPI/#dom-filereader-empty -FileReader.EMPTY = FileReader.prototype.EMPTY = 0 -// https://w3c.github.io/FileAPI/#dom-filereader-loading -FileReader.LOADING = FileReader.prototype.LOADING = 1 -// https://w3c.github.io/FileAPI/#dom-filereader-done -FileReader.DONE = FileReader.prototype.DONE = 2 + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) -Object.defineProperties(FileReader.prototype, { - EMPTY: staticPropertyDescriptors, - LOADING: staticPropertyDescriptors, - DONE: staticPropertyDescriptors, - readAsArrayBuffer: kEnumerableProperty, - readAsBinaryString: kEnumerableProperty, - readAsText: kEnumerableProperty, - readAsDataURL: kEnumerableProperty, - abort: kEnumerableProperty, - readyState: kEnumerableProperty, - result: kEnumerableProperty, - error: kEnumerableProperty, - onloadstart: kEnumerableProperty, - onprogress: kEnumerableProperty, - onload: kEnumerableProperty, - onabort: kEnumerableProperty, - onerror: kEnumerableProperty, - onloadend: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'FileReader', - writable: false, - enumerable: false, - configurable: true + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) } -}) - -Object.defineProperties(FileReader, { - EMPTY: staticPropertyDescriptors, - LOADING: staticPropertyDescriptors, - DONE: staticPropertyDescriptors -}) +} module.exports = { - FileReader + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher } /***/ }), -/***/ 5504: +/***/ 39689: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(72785) +const kPool = Symbol('pool') -const { webidl } = __nccwpck_require__(1744) +class PoolStats { + constructor (pool) { + this[kPool] = pool + } -const kState = Symbol('ProgressEvent state') + get connected 
() { + return this[kPool][kConnected] + } -/** - * @see https://xhr.spec.whatwg.org/#progressevent - */ -class ProgressEvent extends Event { - constructor (type, eventInitDict = {}) { - type = webidl.converters.DOMString(type) - eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + get free () { + return this[kPool][kFree] + } - super(type, eventInitDict) + get pending () { + return this[kPool][kPending] + } - this[kState] = { - lengthComputable: eventInitDict.lengthComputable, - loaded: eventInitDict.loaded, - total: eventInitDict.total - } + get queued () { + return this[kPool][kQueued] } - get lengthComputable () { - webidl.brandCheck(this, ProgressEvent) + get running () { + return this[kPool][kRunning] + } - return this[kState].lengthComputable + get size () { + return this[kPool][kSize] } +} - get loaded () { - webidl.brandCheck(this, ProgressEvent) +module.exports = PoolStats - return this[kState].loaded - } - get total () { - webidl.brandCheck(this, ProgressEvent) +/***/ }), - return this[kState].total - } +/***/ 4634: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = __nccwpck_require__(73198) +const Client = __nccwpck_require__(33598) +const { + InvalidArgumentError +} = __nccwpck_require__(48045) +const util = __nccwpck_require__(83983) +const { kUrl, kInterceptors } = __nccwpck_require__(72785) +const buildConnector = __nccwpck_require__(82067) + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function defaultFactory (origin, opts) { + return new Client(origin, opts) } -webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ - { - key: 'lengthComputable', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'loaded', - converter: webidl.converters['unsigned long long'], - defaultValue: 0 - }, - { - key: 'total', - converter: webidl.converters['unsigned long long'], - defaultValue: 0 - }, - { - key: 'bubbles', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'cancelable', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'composed', - converter: webidl.converters.boolean, - defaultValue: false - } -]) +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() -module.exports = { - ProgressEvent -} + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } -/***/ }), + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } -/***/ 9054: -/***/ ((module) => { + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? 
{ autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } -"use strict"; + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) -module.exports = { - kState: Symbol('FileReader state'), - kResult: Symbol('FileReader result'), - kError: Symbol('FileReader error'), - kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), - kEvents: Symbol('FileReader events'), - kAborted: Symbol('FileReader aborted') + if (dispatcher) { + return dispatcher + } + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) + } + + return dispatcher + } } +module.exports = Pool + /***/ }), -/***/ 7530: +/***/ 97858: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { - kState, - kError, - kResult, - kAborted, - kLastProgressEventFired -} = __nccwpck_require__(9054) -const { ProgressEvent } = __nccwpck_require__(5504) -const { getEncoding } = __nccwpck_require__(4854) -const { DOMException } = __nccwpck_require__(1037) -const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(685) -const { types } = __nccwpck_require__(3837) -const { StringDecoder } = __nccwpck_require__(1576) -const { btoa } = __nccwpck_require__(4300) +const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(72785) +const { URL } = __nccwpck_require__(57310) +const Agent = __nccwpck_require__(7890) +const Pool = __nccwpck_require__(4634) +const DispatcherBase = __nccwpck_require__(74839) +const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(48045) +const buildConnector = __nccwpck_require__(82067) -/** @type {PropertyDescriptor} */ -const staticPropertyDescriptors = { - enumerable: true, - writable: false, - configurable: false +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 443 : 80 } -/** - * @see https://w3c.github.io/FileAPI/#readOperation - * @param {import('./filereader').FileReader} fr - * @param {import('buffer').Blob} blob - * @param {string} type - * @param {string?} encodingName - */ -function readOperation (fr, blob, type, encodingName) { - // 1. If fr’s state is "loading", throw an InvalidStateError - // DOMException. - if (fr[kState] === 'loading') { - throw new DOMException('Invalid state', 'InvalidStateError') +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } } - // 2. Set fr’s state to "loading". - fr[kState] = 'loading' - - // 3. Set fr’s result to null. - fr[kResult] = null - - // 4. Set fr’s error to null. - fr[kError] = null - - // 5. Let stream be the result of calling get stream on blob. 
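The Pool built on PoolBase above is part of undici's public API; a minimal sketch of using it directly in an async context (origin and connection limit are illustrative):

    const { Pool } = require('undici')

    const pool = new Pool('http://localhost:3000', { connections: 8 })

    const { statusCode, body } = await pool.request({ path: '/stats', method: 'GET' })
    console.log(statusCode, await body.text())

    // Backed by the PoolStats getters added above.
    console.log(pool.stats.connected, pool.stats.pending)

    await pool.close()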
- /** @type {import('stream/web').ReadableStream} */ - const stream = blob.stream() - - // 6. Let reader be the result of getting a reader from stream. - const reader = stream.getReader() - - // 7. Let bytes be an empty byte sequence. - /** @type {Uint8Array[]} */ - const bytes = [] - - // 8. Let chunkPromise be the result of reading a chunk from - // stream with reader. - let chunkPromise = reader.read() - - // 9. Let isFirstChunk be true. - let isFirstChunk = true - - // 10. In parallel, while true: - // Note: "In parallel" just means non-blocking - // Note 2: readOperation itself cannot be async as double - // reading the body would then reject the promise, instead - // of throwing an error. - ;(async () => { - while (!fr[kAborted]) { - // 1. Wait for chunkPromise to be fulfilled or rejected. - try { - const { done, value } = await chunkPromise - - // 2. If chunkPromise is fulfilled, and isFirstChunk is - // true, queue a task to fire a progress event called - // loadstart at fr. - if (isFirstChunk && !fr[kAborted]) { - queueMicrotask(() => { - fireAProgressEvent('loadstart', fr) - }) - } - - // 3. Set isFirstChunk to false. - isFirstChunk = false - - // 4. If chunkPromise is fulfilled with an object whose - // done property is false and whose value property is - // a Uint8Array object, run these steps: - if (!done && types.isUint8Array(value)) { - // 1. Let bs be the byte sequence represented by the - // Uint8Array object. + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } - // 2. Append bs to bytes. - bytes.push(value) + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } +} - // 3. If roughly 50ms have passed since these steps - // were last invoked, queue a task to fire a - // progress event called progress at fr. - if ( - ( - fr[kLastProgressEventFired] === undefined || - Date.now() - fr[kLastProgressEventFired] >= 50 - ) && - !fr[kAborted] - ) { - fr[kLastProgressEventFired] = Date.now() - queueMicrotask(() => { - fireAProgressEvent('progress', fr) - }) - } +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} - // 4. Set chunkPromise to the result of reading a - // chunk from stream with reader. - chunkPromise = reader.read() - } else if (done) { - // 5. Otherwise, if chunkPromise is fulfilled with an - // object whose done property is true, queue a task - // to run the following steps and abort this algorithm: - queueMicrotask(() => { - // 1. Set fr’s state to "done". - fr[kState] = 'done' +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? opts.interceptors.ProxyAgent + : [] - // 2. Let result be the result of package data given - // bytes, type, blob’s type, and encodingName. - try { - const result = packageData(bytes, type, blob.type, encodingName) + if (typeof opts === 'string') { + opts = { uri: opts } + } - // 4. Else: + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } - if (fr[kAborted]) { - return - } + const { clientFactory = defaultFactory } = opts - // 1. Set fr’s result to result. - fr[kResult] = result + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } - // 2. Fire a progress event called load at the fr. 
- fireAProgressEvent('load', fr) - } catch (error) { - // 3. If package data threw an exception error: + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} - // 1. Set fr’s error to error. - fr[kError] = error + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl - // 2. Fire a progress event called error at fr. - fireAProgressEvent('error', fr) - } + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } - // 5. If fr’s state is not "loading", fire a progress - // event called loadend at the fr. - if (fr[kState] !== 'loading') { - fireAProgressEvent('loadend', fr) + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host } }) - - break - } - } catch (error) { - if (fr[kAborted]) { - return + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) } + } + }) + } - // 6. Otherwise, if chunkPromise is rejected with an - // error error, queue a task to run the following - // steps and abort this algorithm: - queueMicrotask(() => { - // 1. Set fr’s state to "done". - fr[kState] = 'done' - - // 2. Set fr’s error to error. - fr[kError] = error - - // 3. Fire a progress event called error at fr. - fireAProgressEvent('error', fr) + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host + } + }, + handler + ) + } - // 4. If fr’s state is not "loading", fire a progress - // event called loadend at fr. 
- if (fr[kState] !== 'loading') { - fireAProgressEvent('loadend', fr) - } - }) + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } - break - } - } - })() + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } } /** - * @see https://w3c.github.io/FileAPI/#fire-a-progress-event - * @see https://dom.spec.whatwg.org/#concept-event-fire - * @param {string} e The name of the event - * @param {import('./filereader').FileReader} reader + * @param {string[] | Record} headers + * @returns {Record} */ -function fireAProgressEvent (e, reader) { - // The progress event e does not bubble. e.bubbles must be false - // The progress event e is NOT cancelable. e.cancelable must be false - const event = new ProgressEvent(e, { - bubbles: false, - cancelable: false - }) +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} - reader.dispatchEvent(event) + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers } /** - * @see https://w3c.github.io/FileAPI/#blob-package-data - * @param {Uint8Array[]} bytes - * @param {string} type - * @param {string?} mimeType - * @param {string?} encodingName + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. + * It should be removed in the next major version for performance reasons */ -function packageData (bytes, type, mimeType, encodingName) { - // 1. A Blob has an associated package data algorithm, given - // bytes, a type, a optional mimeType, and a optional - // encodingName, which switches on type and runs the - // associated steps: +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} - switch (type) { - case 'DataURL': { - // 1. Return bytes as a DataURL [RFC2397] subject to - // the considerations below: - // * Use mimeType as part of the Data URL if it is - // available in keeping with the Data URL - // specification [RFC2397]. - // * If mimeType is not available return a Data URL - // without a media-type. [RFC2397]. +module.exports = ProxyAgent - // https://datatracker.ietf.org/doc/html/rfc2397#section-3 - // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data - // mediatype := [ type "/" subtype ] *( ";" parameter ) - // data := *urlchar - // parameter := attribute "=" value - let dataURL = 'data:' - const parsed = parseMIMEType(mimeType || 'application/octet-stream') +/***/ }), - if (parsed !== 'failure') { - dataURL += serializeAMimeType(parsed) - } +/***/ 29459: +/***/ ((module) => { - dataURL += ';base64,' +"use strict"; - const decoder = new StringDecoder('latin1') - for (const chunk of bytes) { - dataURL += btoa(decoder.write(chunk)) - } +let fastNow = Date.now() +let fastNowTimeout - dataURL += btoa(decoder.end()) +const fastTimers = [] - return dataURL - } - case 'Text': { - // 1. Let encoding be failure - let encoding = 'failure' +function onTimeout () { + fastNow = Date.now() - // 2. 
If the encodingName is present, set encoding to the - // result of getting an encoding from encodingName. - if (encodingName) { - encoding = getEncoding(encodingName) - } + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] - // 3. If encoding is failure, and mimeType is present: - if (encoding === 'failure' && mimeType) { - // 1. Let type be the result of parse a MIME type - // given mimeType. - const type = parseMIMEType(mimeType) + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } - // 2. If type is not failure, set encoding to the result - // of getting an encoding from type’s parameters["charset"]. - if (type !== 'failure') { - encoding = getEncoding(type.parameters.get('charset')) - } + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() } + len -= 1 + } else { + idx += 1 + } + } - // 4. If encoding is failure, then set encoding to UTF-8. - if (encoding === 'failure') { - encoding = 'UTF-8' - } + if (fastTimers.length > 0) { + refreshTimeout() + } +} - // 5. Decode bytes using fallback encoding encoding, and - // return the result. - return decode(bytes, encoding) +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() } - case 'ArrayBuffer': { - // Return a new ArrayBuffer whose contents are bytes. - const sequence = combineByteSequences(bytes) + } +} - return sequence.buffer - } - case 'BinaryString': { - // Return bytes as a binary string, in which every byte - // is represented by a code unit of equal value [0..255]. - let binaryString = '' +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque - const decoder = new StringDecoder('latin1') + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 - for (const chunk of bytes) { - binaryString += decoder.write(chunk) + this.refresh() + } + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() } + } - binaryString += decoder.end() + this.state = 0 + } - return binaryString - } + clear () { + this.state = -1 } } -/** - * @see https://encoding.spec.whatwg.org/#decode - * @param {Uint8Array[]} ioQueue - * @param {string} encoding - */ -function decode (ioQueue, encoding) { - const bytes = combineByteSequences(ioQueue) - - // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. - const BOMEncoding = BOMSniffing(bytes) +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } + } +} - let slice = 0 - // 2. If BOMEncoding is non-null: - if (BOMEncoding !== null) { - // 1. Set encoding to BOMEncoding. - encoding = BOMEncoding +/***/ }), - // 2. Read three bytes from ioQueue, if BOMEncoding is - // UTF-8; otherwise read two bytes. - // (Do nothing with those bytes.) - slice = BOMEncoding === 'UTF-8' ? 
3 : 2 - } +/***/ 35354: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 3. Process a queue with an instance of encoding’s - // decoder, ioQueue, output, and "replacement". +"use strict"; - // 4. Return output. - const sliced = bytes.slice(slice) - return new TextDecoder(encoding).decode(sliced) -} +const diagnosticsChannel = __nccwpck_require__(67643) +const { uid, states } = __nccwpck_require__(19188) +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = __nccwpck_require__(37578) +const { fireEvent, failWebsocketConnection } = __nccwpck_require__(25515) +const { CloseEvent } = __nccwpck_require__(52611) +const { makeRequest } = __nccwpck_require__(48359) +const { fetching } = __nccwpck_require__(74881) +const { Headers } = __nccwpck_require__(10554) +const { getGlobalDispatcher } = __nccwpck_require__(21892) +const { kHeadersList } = __nccwpck_require__(72785) -/** - * @see https://encoding.spec.whatwg.org/#bom-sniff - * @param {Uint8Array} ioQueue - */ -function BOMSniffing (ioQueue) { - // 1. Let BOM be the result of peeking 3 bytes from ioQueue, - // converted to a byte sequence. - const [a, b, c] = ioQueue +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') - // 2. For each of the rows in the table below, starting with - // the first one and going down, if BOM starts with the - // bytes given in the first column, then return the - // encoding given in the cell in the second column of that - // row. Otherwise, return null. - if (a === 0xEF && b === 0xBB && c === 0xBF) { - return 'UTF-8' - } else if (a === 0xFE && b === 0xFF) { - return 'UTF-16BE' - } else if (a === 0xFF && b === 0xFE) { - return 'UTF-16LE' - } +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { - return null } /** - * @param {Uint8Array[]} sequences + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options */ -function combineByteSequences (sequences) { - const size = sequences.reduce((a, b) => { - return a + b.byteLength - }, 0) +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url - let offset = 0 + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' - return sequences.reduce((a, b) => { - a.set(b, offset) - offset += b.byteLength - return a - }, new Uint8Array(size)) -} + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) -module.exports = { - staticPropertyDescriptors, - readOperation, - fireAProgressEvent -} + // Note: undici extension, allow setting custom headers. 
+ if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + request.headersList = headersList + } -/***/ }), + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 -/***/ 1892: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') -"use strict"; + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. + request.headersList.append('sec-websocket-key', keyValue) + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') -// We include a version number for the Dispatcher API. In case of breaking changes, -// this version number must be increased to avoid conflicts. -const globalDispatcher = Symbol.for('undici.globalDispatcher.1') -const { InvalidArgumentError } = __nccwpck_require__(8045) -const Agent = __nccwpck_require__(7890) + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. + for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } -if (getGlobalDispatcher() === undefined) { - setGlobalDispatcher(new Agent()) -} + // 9. Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' -function setGlobalDispatcher (agent) { - if (!agent || typeof agent.dispatch !== 'function') { - throw new InvalidArgumentError('Argument agent must implement Agent') - } - Object.defineProperty(globalThis, globalDispatcher, { - value: agent, - writable: true, - enumerable: false, - configurable: false - }) -} + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) -function getGlobalDispatcher () { - return globalThis[globalDispatcher] -} + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } -module.exports = { - setGlobalDispatcher, - getGlobalDispatcher -} + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. 
+ if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. -/***/ }), + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } -/***/ 6930: -/***/ ((module) => { + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } -"use strict"; + // 4. If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } + // 5. If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') -module.exports = class DecoratorHandler { - constructor (handler) { - this.handler = handler - } + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } - onConnect (...args) { - return this.handler.onConnect(...args) - } + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. 
+ const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') - onError (...args) { - return this.handler.onError(...args) - } + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } - onUpgrade (...args) { - return this.handler.onUpgrade(...args) - } + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) - onHeaders (...args) { - return this.handler.onHeaders(...args) - } + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } - onData (...args) { - return this.handler.onData(...args) - } + onEstablish(response) + } + }) - onComplete (...args) { - return this.handler.onComplete(...args) - } + return controller +} - onBodySent (...args) { - return this.handler.onBodySent(...args) +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() } } +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this -/***/ }), + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] -/***/ 2860: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let code = 1005 + let reason = '' -"use strict"; + const result = ws[kByteParser].closingInfo + if (result) { + code = result.code ?? 1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } -const util = __nccwpck_require__(3983) -const { kBodyUsed } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { InvalidArgumentError } = __nccwpck_require__(8045) -const EE = __nccwpck_require__(2361) + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED -const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO -const kBody = Symbol('body') + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. 
+ fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) -class BodyAsyncIterable { - constructor (body) { - this[kBody] = body - this[kBodyUsed] = false + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) } +} - async * [Symbol.asyncIterator] () { - assert(!this[kBodyUsed], 'disturbed') - this[kBodyUsed] = true - yield * this[kBody] +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) } + + this.destroy() } -class RedirectHandler { - constructor (dispatch, maxRedirections, opts, handler) { - if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { - throw new InvalidArgumentError('maxRedirections must be a positive number') - } +module.exports = { + establishWebSocketConnection +} - util.validateHandler(handler, opts.method, opts.upgrade) - this.dispatch = dispatch - this.location = null - this.abort = null - this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy - this.maxRedirections = maxRedirections - this.handler = handler - this.history = [] +/***/ }), - if (util.isStream(this.opts.body)) { - // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp - // so that it can be dispatched again? - // TODO (fix): Do we need 100-expect support to provide a way to do this properly? - if (util.bodyLength(this.opts.body) === 0) { - this.opts.body - .on('data', function () { - assert(false) - }) - } +/***/ 19188: +/***/ ((module) => { - if (typeof this.opts.body.readableDidRead !== 'boolean') { - this.opts.body[kBodyUsed] = false - EE.prototype.on.call(this.opts.body, 'data', function () { - this[kBodyUsed] = true - }) - } - } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { - // TODO (fix): We can't access ReadableStream internal state - // to determine whether or not it has been disturbed. This is just - // a workaround. - this.opts.body = new BodyAsyncIterable(this.opts.body) - } else if ( - this.opts.body && - typeof this.opts.body !== 'string' && - !ArrayBuffer.isView(this.opts.body) && - util.isIterable(this.opts.body) - ) { - // TODO: Should we allow re-using iterable if !this.opts.idempotent - // or through some other flag? - this.opts.body = new BodyAsyncIterable(this.opts.body) - } - } +"use strict"; - onConnect (abort) { - this.abort = abort - this.handler.onConnect(abort, { history: this.history }) - } - onUpgrade (statusCode, headers, socket) { - this.handler.onUpgrade(statusCode, headers, socket) - } +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' - onError (error) { - this.handler.onError(error) - } +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} - onHeaders (statusCode, headers, resume, statusText) { - this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) - ? 
null - : parseLocation(statusCode, headers) +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} - if (this.opts.origin) { - this.history.push(new URL(this.opts.path, this.opts.origin)) - } +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} - if (!this.location) { - return this.handler.onHeaders(statusCode, headers, resume, statusText) - } +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 - const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) - const path = search ? `${pathname}${search}` : pathname +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} - // Remove headers referring to the original URL. - // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. - // https://tools.ietf.org/html/rfc7231#section-6.4 - this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) - this.opts.path = path - this.opts.origin = origin - this.opts.maxRedirections = 0 - this.opts.query = null +const emptyBuffer = Buffer.allocUnsafe(0) - // https://tools.ietf.org/html/rfc7231#section-6.4.4 - // In case of HTTP 303, always replace method to be either HEAD or GET - if (statusCode === 303 && this.opts.method !== 'HEAD') { - this.opts.method = 'GET' - this.opts.body = null - } - } +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} - onData (chunk) { - if (this.location) { - /* - https://tools.ietf.org/html/rfc7231#section-6.4 - TLDR: undici always ignores 3xx response bodies. +/***/ }), - Redirection is used to serve the requested resource from another URL, so it is assumes that - no body is generated (and thus can be ignored). Even though generating a body is not prohibited. +/***/ 52611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually - (which means it's optional and not mandated) contain just an hyperlink to the value of - the Location response header, so the body can be ignored safely. +"use strict"; - For status 300, which is "Multiple Choices", the spec mentions both generating a Location - response header AND a response body with the other possible location to follow. - Since the spec explicitily chooses not to specify a format for such body and leave it to - servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. - */ - } else { - return this.handler.onData(chunk) - } - } - onComplete (trailers) { - if (this.location) { - /* - https://tools.ietf.org/html/rfc7231#section-6.4 +const { webidl } = __nccwpck_require__(21744) +const { kEnumerableProperty } = __nccwpck_require__(83983) +const { MessagePort } = __nccwpck_require__(71267) - TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections - and neither are useful if present. +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit - See comment on onData method above for more detailed informations. 
- */ + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) - this.location = null - this.abort = null + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) - this.dispatch(this.opts, this) - } else { - this.handler.onComplete(trailers) - } - } + super(type, eventInitDict) - onBodySent (chunk) { - if (this.handler.onBodySent) { - this.handler.onBodySent(chunk) - } + this.#eventInit = eventInitDict } -} -function parseLocation (statusCode, headers) { - if (redirectableStatusCodes.indexOf(statusCode) === -1) { - return null + get data () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.data } - for (let i = 0; i < headers.length; i += 2) { - if (headers[i].toString().toLowerCase() === 'location') { - return headers[i + 1] - } + get origin () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.origin } -} -// https://tools.ietf.org/html/rfc7231#section-6.4.4 -function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) -} + get lastEventId () { + webidl.brandCheck(this, MessageEvent) -// https://tools.ietf.org/html/rfc7231#section-6.4 -function cleanRequestHeaders (headers, removeContent, unknownOrigin) { - const ret = [] - if (Array.isArray(headers)) { - for (let i = 0; i < headers.length; i += 2) { - if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { - ret.push(headers[i], headers[i + 1]) - } - } - } else if (headers && typeof headers === 'object') { - for (const key of Object.keys(headers)) { - if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { - ret.push(key, headers[key]) - } - } - } else { - assert(headers == null, 'headers must be an object or an array') + return this.#eventInit.lastEventId } - return ret -} -module.exports = RedirectHandler + get source () { + webidl.brandCheck(this, MessageEvent) + return this.#eventInit.source + } -/***/ }), + get ports () { + webidl.brandCheck(this, MessageEvent) -/***/ 2286: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } -const assert = __nccwpck_require__(9491) + return this.#eventInit.ports + } -const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785) -const { RequestRetryError } = __nccwpck_require__(8045) -const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983) + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) -function calculateRetryAfterHeader (retryAfter) { - const current = Date.now() - const diff = new Date(retryAfter).getTime() - current + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) - return diff + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } } -class RetryHandler { - constructor (opts, handlers) { - const { retryOptions, ...dispatchOpts } = opts - const { - // Retry scoped - retry: retryFn, - 
maxRetries, - maxTimeout, - minTimeout, - timeoutFactor, - // Response scoped - methods, - errorCodes, - retryAfter, - statusCodes - } = retryOptions ?? {} +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit - this.dispatch = handlers.dispatch - this.handler = handlers.handler - this.opts = dispatchOpts - this.abort = null - this.aborted = false - this.retryOpts = { - retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], - retryAfter: retryAfter ?? true, - maxTimeout: maxTimeout ?? 30 * 1000, // 30s, - timeout: minTimeout ?? 500, // .5s - timeoutFactor: timeoutFactor ?? 2, - maxRetries: maxRetries ?? 5, - // What errors we should retry - methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], - // Indicates which errors to retry - statusCodes: statusCodes ?? [500, 502, 503, 504, 429], - // List of errors to retry - errorCodes: errorCodes ?? [ - 'ECONNRESET', - 'ECONNREFUSED', - 'ENOTFOUND', - 'ENETDOWN', - 'ENETUNREACH', - 'EHOSTDOWN', - 'EHOSTUNREACH', - 'EPIPE' - ] - } + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) - this.retryCount = 0 - this.start = 0 - this.end = null - this.etag = null - this.resume = null + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) - // Handle possible onConnect duplication - this.handler.onConnect(reason => { - this.aborted = true - if (this.abort) { - this.abort(reason) - } else { - this.reason = reason - } - }) - } + super(type, eventInitDict) - onRequestSent () { - if (this.handler.onRequestSent) { - this.handler.onRequestSent() - } + this.#eventInit = eventInitDict } - onUpgrade (statusCode, headers, socket) { - if (this.handler.onUpgrade) { - this.handler.onUpgrade(statusCode, headers, socket) - } - } + get wasClean () { + webidl.brandCheck(this, CloseEvent) - onConnect (abort) { - if (this.aborted) { - abort(this.reason) - } else { - this.abort = abort - } + return this.#eventInit.wasClean } - onBodySent (chunk) { - if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + get code () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.code } - static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { - const { statusCode, code, headers } = err - const { method, retryOptions } = opts - const { - maxRetries, - timeout, - maxTimeout, - timeoutFactor, - statusCodes, - errorCodes, - methods - } = retryOptions - let { counter, currentTimeout } = state + get reason () { + webidl.brandCheck(this, CloseEvent) - currentTimeout = - currentTimeout != null && currentTimeout > 0 ? 
currentTimeout : timeout + return this.#eventInit.reason + } +} - // Any code that is not a Undici's originated and allowed to retry - if ( - code && - code !== 'UND_ERR_REQ_RETRY' && - code !== 'UND_ERR_SOCKET' && - !errorCodes.includes(code) - ) { - cb(err) - return - } +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit - // If a set of method are provided and the current method is not in the list - if (Array.isArray(methods) && !methods.includes(method)) { - cb(err) - return - } + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) - // If a set of status code are provided and the current status code is not in the list - if ( - statusCode != null && - Array.isArray(statusCodes) && - !statusCodes.includes(statusCode) - ) { - cb(err) - return - } + super(type, eventInitDict) - // If we reached the max number of retries - if (counter > maxRetries) { - cb(err) - return - } + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) - let retryAfterHeader = headers != null && headers['retry-after'] - if (retryAfterHeader) { - retryAfterHeader = Number(retryAfterHeader) - retryAfterHeader = isNaN(retryAfterHeader) - ? calculateRetryAfterHeader(retryAfterHeader) - : retryAfterHeader * 1e3 // Retry-After is in seconds - } + this.#eventInit = eventInitDict + } - const retryTimeout = - retryAfterHeader > 0 - ? Math.min(retryAfterHeader, maxTimeout) - : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + get message () { + webidl.brandCheck(this, ErrorEvent) - state.currentTimeout = retryTimeout + return this.#eventInit.message + } - setTimeout(() => cb(null), retryTimeout) + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename } - onHeaders (statusCode, rawHeaders, resume, statusMessage) { - const headers = parseHeaders(rawHeaders) + get lineno () { + webidl.brandCheck(this, ErrorEvent) - this.retryCount += 1 + return this.#eventInit.lineno + } - if (statusCode >= 300) { - this.abort( - new RequestRetryError('Request failed', statusCode, { - headers, - count: this.retryCount - }) - ) - return false - } + get colno () { + webidl.brandCheck(this, ErrorEvent) - // Checkpoint for resume from where we left it - if (this.resume != null) { - this.resume = null + return this.#eventInit.colno + } - if (statusCode !== 206) { - return true - } + get error () { + webidl.brandCheck(this, ErrorEvent) - const contentRange = parseRangeHeader(headers['content-range']) - // If no content range - if (!contentRange) { - this.abort( - new RequestRetryError('Content-Range mismatch', statusCode, { - headers, - count: this.retryCount - }) - ) - return false - } + return this.#eventInit.error + } +} - // Let's start with a weak etag check - if (this.etag != null && this.etag !== headers.etag) { - this.abort( - new RequestRetryError('ETag mismatch', statusCode, { - headers, - count: this.retryCount - }) - ) - return false - } +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) - const { start, size, end = size } = contentRange +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + 
value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) - assert(this.start === start, 'content-range mismatch') - assert(this.end == null || this.end === end, 'content-range mismatch') +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) - this.resume = resume - return true - } +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) - if (this.end == null) { - if (statusCode === 206) { - // First time we receive 206 - const range = parseRangeHeader(headers['content-range']) +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) - if (range == null) { - return this.handler.onHeaders( - statusCode, - rawHeaders, - resume, - statusMessage - ) - } +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] - const { start, size, end = size } = range +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. + converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) - assert( - start != null && Number.isFinite(start) && this.start !== start, - 'content-range mismatch' - ) - assert(Number.isFinite(start)) - assert( - end != null && Number.isFinite(end) && this.end !== end, - 'invalid content-length' - ) +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' + } +]) + +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) - this.start = start - this.end = end - } +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent +} - // We make our best to checkpoint the body for further range headers - if (this.end == null) { - const contentLength = headers['content-length'] - this.end = contentLength != null ? 
Number(contentLength) : null - } - assert(Number.isFinite(this.start)) - assert( - this.end == null || Number.isFinite(this.end), - 'invalid content-length' - ) +/***/ }), - this.resume = resume - this.etag = headers.etag != null ? headers.etag : null +/***/ 25444: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return this.handler.onHeaders( - statusCode, - rawHeaders, - resume, - statusMessage - ) - } +"use strict"; - const err = new RequestRetryError('Request failed', statusCode, { - headers, - count: this.retryCount - }) - this.abort(err) +const { maxUnsigned16Bit } = __nccwpck_require__(19188) - return false - } +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6113) +} catch { - onData (chunk) { - this.start += chunk.length +} - return this.handler.onData(chunk) +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) } - onComplete (rawTrailers) { - this.retryCount = 0 - return this.handler.onComplete(rawTrailers) - } + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 - onError (err) { - if (this.aborted || isDisturbed(this.opts.body)) { - return this.handler.onError(err) + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 } - this.retryOpts.retry( - err, - { - state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, - opts: { retryOptions: this.retryOpts, ...this.opts } - }, - onRetry.bind(this) - ) + const buffer = Buffer.allocUnsafe(bodyLength + offset) - function onRetry (err) { - if (err != null || this.aborted || isDisturbed(this.opts.body)) { - return this.handler.onError(err) - } + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode - if (this.start !== 0) { - this.opts = { - ...this.opts, - headers: { - ...this.opts.headers, - range: `bytes=${this.start}-${this.end ?? ''}` - } - } - } + /*! ws. MIT License. 
Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] - try { - this.dispatch(this.opts, this) - } catch (err) { - this.handler.onError(err) - } + buffer[1] = payloadLength + + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) + } + + buffer[1] |= 0x80 // MASK + + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] } + + return buffer } } -module.exports = RetryHandler +module.exports = { + WebsocketFrameSend +} /***/ }), -/***/ 8861: +/***/ 11688: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const RedirectHandler = __nccwpck_require__(2860) +const { Writable } = __nccwpck_require__(12781) +const diagnosticsChannel = __nccwpck_require__(67643) +const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(19188) +const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(37578) +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(25515) +const { WebsocketFrameSend } = __nccwpck_require__(25444) -function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { - return (dispatch) => { - return function Intercept (opts, handler) { - const { maxRedirections = defaultMaxRedirections } = opts +// This code was influenced by ws released under the MIT license. +// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors - if (!maxRedirections) { - return dispatch(opts, handler) - } +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') - const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) - opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. - return dispatch(opts, redirectHandler) - } +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 + + #state = parserStates.INFO + + #info = {} + #fragments = [] + + constructor (ws) { + super() + + this.ws = ws } -} -module.exports = createRedirectInterceptor + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + this.run(callback) + } -/***/ }), + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. 
+ if (this.#byteOffset < 2) { + return callback() + } -/***/ 953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + const buffer = this.consume(2) -"use strict"; + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; -const utils_1 = __nccwpck_require__(1891); -// C headers -var ERROR; -(function (ERROR) { - ERROR[ERROR["OK"] = 0] = "OK"; - ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; - ERROR[ERROR["STRICT"] = 2] = "STRICT"; - ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; - ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; - ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; - ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; - ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; - ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; - ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; - ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; - ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; - ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; - ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; - ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; - ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; - ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; - ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; - ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; - ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; - ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; - ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; - ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; - ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; - ERROR[ERROR["USER"] = 24] = "USER"; -})(ERROR = exports.ERROR || (exports.ERROR = {})); -var TYPE; -(function (TYPE) { - TYPE[TYPE["BOTH"] = 0] = "BOTH"; - TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; - TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; -})(TYPE = exports.TYPE || (exports.TYPE = {})); -var FLAGS; -(function (FLAGS) { - FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; - FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; - FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; - FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; - FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; - FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; - FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; - FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; - // 1 << 8 is unused - FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; -})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); -var LENIENT_FLAGS; -(function (LENIENT_FLAGS) { - LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; - LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; - 
LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; -})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); -var METHODS; -(function (METHODS) { - METHODS[METHODS["DELETE"] = 0] = "DELETE"; - METHODS[METHODS["GET"] = 1] = "GET"; - METHODS[METHODS["HEAD"] = 2] = "HEAD"; - METHODS[METHODS["POST"] = 3] = "POST"; - METHODS[METHODS["PUT"] = 4] = "PUT"; - /* pathological */ - METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; - METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; - METHODS[METHODS["TRACE"] = 7] = "TRACE"; - /* WebDAV */ - METHODS[METHODS["COPY"] = 8] = "COPY"; - METHODS[METHODS["LOCK"] = 9] = "LOCK"; - METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; - METHODS[METHODS["MOVE"] = 11] = "MOVE"; - METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; - METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; - METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; - METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; - METHODS[METHODS["BIND"] = 16] = "BIND"; - METHODS[METHODS["REBIND"] = 17] = "REBIND"; - METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; - METHODS[METHODS["ACL"] = 19] = "ACL"; - /* subversion */ - METHODS[METHODS["REPORT"] = 20] = "REPORT"; - METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; - METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; - METHODS[METHODS["MERGE"] = 23] = "MERGE"; - /* upnp */ - METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; - METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; - METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; - METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; - /* RFC-5789 */ - METHODS[METHODS["PATCH"] = 28] = "PATCH"; - METHODS[METHODS["PURGE"] = 29] = "PURGE"; - /* CalDAV */ - METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; - /* RFC-2068, section 19.6.1.2 */ - METHODS[METHODS["LINK"] = 31] = "LINK"; - METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; - /* icecast */ - METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; - /* RFC-7540, section 11.6 */ - METHODS[METHODS["PRI"] = 34] = "PRI"; - /* RFC-2326 RTSP */ - METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; - METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; - METHODS[METHODS["SETUP"] = 37] = "SETUP"; - METHODS[METHODS["PLAY"] = 38] = "PLAY"; - METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; - METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; - METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; - METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; - METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; - METHODS[METHODS["RECORD"] = 44] = "RECORD"; - /* RAOP */ - METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; -})(METHODS = exports.METHODS || (exports.METHODS = {})); -exports.METHODS_HTTP = [ - METHODS.DELETE, - METHODS.GET, - METHODS.HEAD, - METHODS.POST, - METHODS.PUT, - METHODS.CONNECT, - METHODS.OPTIONS, - METHODS.TRACE, - METHODS.COPY, - METHODS.LOCK, - METHODS.MKCOL, - METHODS.MOVE, - METHODS.PROPFIND, - METHODS.PROPPATCH, - METHODS.SEARCH, - METHODS.UNLOCK, - METHODS.BIND, - METHODS.REBIND, - METHODS.UNBIND, - METHODS.ACL, - METHODS.REPORT, - METHODS.MKACTIVITY, - METHODS.CHECKOUT, - METHODS.MERGE, - METHODS['M-SEARCH'], - METHODS.NOTIFY, - METHODS.SUBSCRIBE, - METHODS.UNSUBSCRIBE, - METHODS.PATCH, - METHODS.PURGE, - METHODS.MKCALENDAR, - METHODS.LINK, - METHODS.UNLINK, - METHODS.PRI, - // TODO(indutny): should we allow it with HTTP? 
- METHODS.SOURCE, -]; -exports.METHODS_ICE = [ - METHODS.SOURCE, -]; -exports.METHODS_RTSP = [ - METHODS.OPTIONS, - METHODS.DESCRIBE, - METHODS.ANNOUNCE, - METHODS.SETUP, - METHODS.PLAY, - METHODS.PAUSE, - METHODS.TEARDOWN, - METHODS.GET_PARAMETER, - METHODS.SET_PARAMETER, - METHODS.REDIRECT, - METHODS.RECORD, - METHODS.FLUSH, - // For AirPlay - METHODS.GET, - METHODS.POST, -]; -exports.METHOD_MAP = utils_1.enumToMap(METHODS); -exports.H_METHOD_MAP = {}; -Object.keys(exports.METHOD_MAP).forEach((key) => { - if (/^H/.test(key)) { - exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; - } -}); -var FINISH; -(function (FINISH) { - FINISH[FINISH["SAFE"] = 0] = "SAFE"; - FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; - FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; -})(FINISH = exports.FINISH || (exports.FINISH = {})); -exports.ALPHA = []; -for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { - // Upper case - exports.ALPHA.push(String.fromCharCode(i)); - // Lower case - exports.ALPHA.push(String.fromCharCode(i + 0x20)); -} -exports.NUM_MAP = { - 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, - 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, -}; -exports.HEX_MAP = { - 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, - 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, - A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, - a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, -}; -exports.NUM = [ - '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', -]; -exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); -exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; -exports.USERINFO_CHARS = exports.ALPHANUM - .concat(exports.MARK) - .concat(['%', ';', ':', '&', '=', '+', '$', ',']); -// TODO(indutny): use RFC -exports.STRICT_URL_CHAR = [ - '!', '"', '$', '%', '&', '\'', - '(', ')', '*', '+', ',', '-', '.', '/', - ':', ';', '<', '=', '>', - '@', '[', '\\', ']', '^', '_', - '`', - '{', '|', '}', '~', -].concat(exports.ALPHANUM); -exports.URL_CHAR = exports.STRICT_URL_CHAR - .concat(['\t', '\f']); -// All characters with 0x80 bit set to 1 -for (let i = 0x80; i <= 0xff; i++) { - exports.URL_CHAR.push(i); -} -exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); -/* Tokens as defined by rfc 2616. Also lowercases them. - * token = 1* - * separators = "(" | ")" | "<" | ">" | "@" - * | "," | ";" | ":" | "\" | <"> - * | "/" | "[" | "]" | "?" 
| "=" - * | "{" | "}" | SP | HT - */ -exports.STRICT_TOKEN = [ - '!', '#', '$', '%', '&', '\'', - '*', '+', '-', '.', - '^', '_', '`', - '|', '~', -].concat(exports.ALPHANUM); -exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); -/* - * Verify that a char is a valid visible (printable) US-ASCII - * character or %x80-FF - */ -exports.HEADER_CHARS = ['\t']; -for (let i = 32; i <= 255; i++) { - if (i !== 127) { - exports.HEADER_CHARS.push(i); - } -} -// ',' = \x44 -exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); -exports.MAJOR = exports.NUM_MAP; -exports.MINOR = exports.MAJOR; -var HEADER_STATE; -(function (HEADER_STATE) { - HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; - HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; - HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; - HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; - HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; - HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; - HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; - HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; - HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; -})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); -exports.SPECIAL_HEADERS = { - 'connection': HEADER_STATE.CONNECTION, - 'content-length': HEADER_STATE.CONTENT_LENGTH, - 'proxy-connection': HEADER_STATE.CONNECTION, - 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, - 'upgrade': HEADER_STATE.UPGRADE, -}; -//# sourceMappingURL=constants.js.map + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode + + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } + + const payloadLength = buffer[1] & 0x7F + + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } + + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } -/***/ }), + const body = this.consume(payloadLength) -/***/ 1145: -/***/ ((module) => { + this.#info.closeInfo = this.parseCloseBody(false, body) -module.exports = 
'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA
8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAA
HENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQ
DAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQF
qIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIg
FqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBA
WohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0A
AEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASE
QDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZ
IBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcI
AAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzb
AQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECA
AQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5A
A2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqI
gNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtB
ACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAs
gBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQ
BBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpb
iBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9O
X05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE
BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) 
+ const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } -/***/ }), + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true -/***/ 5627: -/***/ ((module) => { + this.end() -module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0
eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALw
EyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQD
D0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZB
GyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAy
PAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQd
QAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa
0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwEL
AkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgE
hEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0
HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAO
gAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCn
gICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQck
AIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQ
AhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA0
4CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYC
ACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiA
GRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3Rhcn
QgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAU
kVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping 
frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" + const body = this.consume(payloadLength) -/***/ }), + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) -/***/ 1891: -/***/ ((__unused_webpack_module, exports) => { + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) -"use strict"; + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.enumToMap = void 0; -function enumToMap(obj) { - const res = {}; - Object.keys(obj).forEach((key) => { - const value = obj[key]; - if (typeof value === 'number') { - res[key] = value; + this.#state = parserStates.INFO + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. A response to an unsolicited Pong frame is + // not expected. + + const body = this.consume(payloadLength) + + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() } - }); - return res; -} -exports.enumToMap = enumToMap; -//# sourceMappingURL=utils.js.map -/***/ }), + const buffer = this.consume(2) -/***/ 6771: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } -"use strict"; + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. 
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } -const { kClients } = __nccwpck_require__(2785) -const Agent = __nccwpck_require__(7890) -const { - kAgent, - kMockAgentSet, - kMockAgentGet, - kDispatches, - kIsMockActive, - kNetConnect, - kGetNetConnect, - kOptions, - kFactory -} = __nccwpck_require__(4347) -const MockClient = __nccwpck_require__(8687) -const MockPool = __nccwpck_require__(6193) -const { matchValue, buildMockOptions } = __nccwpck_require__(9323) -const { InvalidArgumentError, UndiciError } = __nccwpck_require__(8045) -const Dispatcher = __nccwpck_require__(412) -const Pluralizer = __nccwpck_require__(8891) -const PendingInterceptorsFormatter = __nccwpck_require__(6823) + const lower = buffer.readUInt32BE(4) -class FakeWeakRef { - constructor (value) { - this.value = value - } + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk - deref () { - return this.value - } -} + const body = this.consume(this.#info.payloadLength) -class MockAgent extends Dispatcher { - constructor (opts) { - super(opts) + this.#fragments.push(body) - this[kNetConnect] = true - this[kIsMockActive] = true + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) - // Instantiate Agent and encapsulate - if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { - throw new InvalidArgumentError('Argument opts.agent must implement Agent') - } - const agent = opts && opts.agent ? 
opts.agent : new Agent(opts) - this[kAgent] = agent + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) - this[kClients] = agent[kClients] - this[kOptions] = buildMockOptions(opts) - } + this.#info = {} + this.#fragments.length = 0 + } - get (origin) { - let dispatcher = this[kMockAgentGet](origin) + this.#state = parserStates.INFO + } + } - if (!dispatcher) { - dispatcher = this[kFactory](origin) - this[kMockAgentSet](origin, dispatcher) + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } } - return dispatcher } - dispatch (opts, handler) { - // Call MockAgent.get to perform additional setup before dispatching as normal - this.get(opts.origin) - return this[kAgent].dispatch(opts, handler) - } + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } - async close () { - await this[kAgent].close() - this[kClients].clear() - } + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } - deactivate () { - this[kIsMockActive] = false - } + const buffer = Buffer.allocUnsafe(n) + let offset = 0 - activate () { - this[kIsMockActive] = true - } + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next - enableNetConnect (matcher) { - if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { - if (Array.isArray(this[kNetConnect])) { - this[kNetConnect].push(matcher) + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break } else { - this[kNetConnect] = [matcher] + buffer.set(this.#buffers.shift(), offset) + offset += next.length } - } else if (typeof matcher === 'undefined') { - this[kNetConnect] = true - } else { - throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') } - } - disableNetConnect () { - this[kNetConnect] = false - } + this.#byteOffset -= n - // This is required to bypass issues caused by using global symbols - see: - // https://github.com/nodejs/undici/issues/1447 - get isMockActive () { - return this[kIsMockActive] + return buffer } - [kMockAgentSet] (origin, dispatcher) { - this[kClients].set(origin, new FakeWeakRef(dispatcher)) - } + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code - [kFactory] (origin) { - const mockOptions = Object.assign({ agent: this }, this[kOptions]) - return this[kOptions] && this[kOptions].connections === 1 - ? 
new MockClient(origin, mockOptions) - : new MockPool(origin, mockOptions) - } + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first Close + // control frame received by the application + code = data.readUInt16BE(0) + } - [kMockAgentGet] (origin) { - // First check if we can immediately find it - const ref = this[kClients].get(origin) - if (ref) { - return ref.deref() + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } + + return { code } } - // If the origin is not a string create a dummy parent pool and return to user - if (typeof origin !== 'string') { - const dispatcher = this[kFactory]('http://localhost:9999') - this[kMockAgentSet](origin, dispatcher) - return dispatcher + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) + + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) } - // If we match, create a pool and assign the same dispatches - for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { - const nonExplicitDispatcher = nonExplicitRef.deref() - if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { - const dispatcher = this[kFactory](origin) - this[kMockAgentSet](origin, dispatcher) - dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] - return dispatcher - } + if (code !== undefined && !isValidStatusCode(code)) { + return null + } + + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null } + + return { code, reason } } - [kGetNetConnect] () { - return this[kNetConnect] + get closingInfo () { + return this.#info.closeInfo } +} - pendingInterceptors () { - const mockAgentClients = this[kClients] +module.exports = { + ByteParser +} - return Array.from(mockAgentClients.entries()) - .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) - .filter(({ pending }) => pending) - } - assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { - const pending = this.pendingInterceptors() +/***/ }), - if (pending.length === 0) { - return - } +/***/ 37578: +/***/ ((module) => { - const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) +"use strict"; - throw new UndiciError(` -${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: -${pendingInterceptorsFormatter.format(pending)} -`.trim()) - } +module.exports = { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') } -module.exports = MockAgent - /***/ }), -/***/ 8687: +/***/ 25515: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { promisify } = __nccwpck_require__(3837) -const Client = __nccwpck_require__(3598) -const { buildMockDispatch } = __nccwpck_require__(9323) -const { - kDispatches, - kMockAgent, - kClose, - kOriginalClose, - kOrigin, - kOriginalDispatch, - kConnected -} = __nccwpck_require__(4347) -const { MockInterceptor } = __nccwpck_require__(410) -const Symbols = __nccwpck_require__(2785) -const { InvalidArgumentError } = 
__nccwpck_require__(8045) +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(37578) +const { states, opcodes } = __nccwpck_require__(19188) +const { MessageEvent, ErrorEvent } = __nccwpck_require__(52611) + +/* globals Blob */ /** - * MockClient provides an API that extends the Client to influence the mockDispatches. + * @param {import('./websocket').WebSocket} ws */ -class MockClient extends Client { - constructor (origin, opts) { - super(origin, opts) +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} - if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { - throw new InvalidArgumentError('Argument opts.agent must implement Agent') - } +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + return ws[kReadyState] === states.CLOSING +} - this[kMockAgent] = opts.agent - this[kOrigin] = origin - this[kDispatches] = [] - this[kConnected] = 1 - this[kOriginalDispatch] = this.dispatch - this[kOriginalClose] = this.close.bind(this) +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} - this.dispatch = buildMockDispatch.call(this) - this.close = this[kClose] +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. + + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap + + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. + target.dispatchEvent(event) +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return } - get [Symbols.kConnected] () { - return this[kConnected] + // 2. 
Let dataForEvent be determined by switching on type and binary type: + let dataForEvent + + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } } - /** - * Sets up the base interceptor for mocking replies from undici. - */ - intercept (opts) { - return new MockInterceptor(opts, this[kDispatches]) + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. + fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } + + for (const char of protocol) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' 
|| + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } } - async [kClose] () { - await promisify(this[kOriginalClose])() - this[kConnected] = 0 - this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) - } + return true } -module.exports = MockClient - - -/***/ }), +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } -/***/ 888: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return code >= 3000 && code <= 4999 +} -"use strict"; +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws + controller.abort() -const { UndiciError } = __nccwpck_require__(8045) + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } -class MockNotMatchedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, MockNotMatchedError) - this.name = 'MockNotMatchedError' - this.message = message || 'The request does not match any registered mock dispatches' - this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) } } module.exports = { - MockNotMatchedError + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived } /***/ }), -/***/ 410: +/***/ 54284: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(9323) +const { webidl } = __nccwpck_require__(21744) +const { DOMException } = __nccwpck_require__(41037) +const { URLSerializer } = __nccwpck_require__(685) +const { getGlobalOrigin } = __nccwpck_require__(71246) +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(19188) const { - kDispatches, - kDispatchKey, - kDefaultHeaders, - kDefaultTrailers, - kContentLength, - kMockDispatch -} = __nccwpck_require__(4347) -const { InvalidArgumentError } = __nccwpck_require__(8045) -const { buildURL } = __nccwpck_require__(3983) + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = __nccwpck_require__(37578) +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(25515) +const { establishWebSocketConnection } = __nccwpck_require__(35354) +const { WebsocketFrameSend } = __nccwpck_require__(25444) +const { ByteParser } = __nccwpck_require__(11688) +const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(83983) +const { getGlobalDispatcher } = __nccwpck_require__(21892) +const { types } = __nccwpck_require__(73837) -/** - * Defines the scope API for an interceptor reply - */ -class MockScope { - constructor (mockDispatch) { - this[kMockDispatch] = mockDispatch +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null } + #bufferedAmount = 0 + #protocol = '' + 
#extensions = '' + /** - * Delay a reply by a set amount in ms. + * @param {string} url + * @param {string|string[]} protocols */ - delay (waitInMs) { - if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { - throw new InvalidArgumentError('waitInMs must be a valid integer > 0') + constructor (url, protocols = []) { + super() + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) + + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) } - this[kMockDispatch].delay = waitInMs - return this - } + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) - /** - * For a defined reply, never mark as consumed. - */ - persist () { - this[kMockDispatch].persist = true - return this - } + url = webidl.converters.USVString(url) + protocols = options.protocols - /** - * Allow one to define a reply for a set amount of matching requests. - */ - times (repeatTimes) { - if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { - throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') - } + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() - this[kMockDispatch].times = repeatTimes - return this - } -} + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. + let urlRecord -/** - * Defines an interceptor for a Mock - */ -class MockInterceptor { - constructor (opts, mockDispatches) { - if (typeof opts !== 'object') { - throw new InvalidArgumentError('opts must be an object') + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') } - if (typeof opts.path === 'undefined') { - throw new InvalidArgumentError('opts.path must be defined') + + // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". + if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' } - if (typeof opts.method === 'undefined') { - opts.method = 'GET' + + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) } - // See https://github.com/nodejs/undici/issues/1245 - // As per RFC 3986, clients are not supposed to send URI - // fragments to servers when they retrieve a document, - if (typeof opts.path === 'string') { - if (opts.query) { - opts.path = buildURL(opts.path, opts.query) - } else { - // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 - const parsedURL = new URL(opts.path, 'data://') - opts.path = parsedURL.pathname + parsedURL.search - } + + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') } - if (typeof opts.method === 'string') { - opts.method = opts.method.toUpperCase() + + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. 
+ if (typeof protocols === 'string') { + protocols = [protocols] } - this[kDispatchKey] = buildKey(opts) - this[kDispatches] = mockDispatches - this[kDefaultHeaders] = {} - this[kDefaultTrailers] = {} - this[kContentLength] = false - } + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. + if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } - createMockScopeDispatchData (statusCode, data, responseOptions = {}) { - const responseData = getResponseData(data) - const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} - const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } - const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } - return { statusCode, data, headers, trailers } - } + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) - validateReplyParameters (statusCode, data, responseOptions) { - if (typeof statusCode === 'undefined') { - throw new InvalidArgumentError('statusCode must be defined') - } - if (typeof data === 'undefined') { - throw new InvalidArgumentError('data must be defined') - } - if (typeof responseOptions !== 'object') { - throw new InvalidArgumentError('responseOptions must be an object') - } - } + // 11. Let client be this's relevant settings object. - /** - * Mock an undici request with a defined reply. - */ - reply (replyData) { - // Values of reply aren't available right now as they - // can only be available when the reply callback is invoked. - if (typeof replyData === 'function') { - // We'll first wrap the provided callback in another function, - // this function will properly resolve the data from the callback - // when invoked. - const wrappedDefaultsCallback = (opts) => { - // Our reply options callback contains the parameter for statusCode, data and options. - const resolvedData = replyData(opts) + // 12. Run this step in parallel: - // Check if it is in the right format - if (typeof resolvedData !== 'object') { - throw new InvalidArgumentError('reply options callback must return an object') - } + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) - const { statusCode, data = '', responseOptions = {} } = resolvedData - this.validateReplyParameters(statusCode, data, responseOptions) - // Since the values can be obtained immediately we return them - // from this higher order function that will be resolved later. - return { - ...this.createMockScopeDispatchData(statusCode, data, responseOptions) - } - } + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING - // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. 
- const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) - return new MockScope(newMockDispatch) - } + // The extensions attribute must initially return the empty string. - // We can have either one or three parameters, if we get here, - // we should have 1-3 parameters. So we spread the arguments of - // this function to obtain the parameters, since replyData will always - // just be the statusCode. - const [statusCode, data = '', responseOptions = {}] = [...arguments] - this.validateReplyParameters(statusCode, data, responseOptions) + // The protocol attribute must initially return the empty string. - // Send in-already provided data like usual - const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) - const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) - return new MockScope(newMockDispatch) + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' } /** - * Mock an undici request with a defined error. + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason */ - replyWithError (error) { - if (typeof error === 'undefined') { - throw new InvalidArgumentError('error must be defined') + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) + + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) } - const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) - return new MockScope(newMockDispatch) - } + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) + } - /** - * Set default reply headers on the interceptor for subsequent replies - */ - defaultReplyHeaders (headers) { - if (typeof headers === 'undefined') { - throw new InvalidArgumentError('headers must be defined') + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') + } } - this[kDefaultHeaders] = headers - return this - } + let reasonByteLength = 0 - /** - * Set default reply trailers on the interceptor for subsequent replies - */ - defaultReplyTrailers (trailers) { - if (typeof trailers === 'undefined') { - throw new InvalidArgumentError('trailers must be defined') + // 2. If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. + reasonByteLength = Buffer.byteLength(reason) + + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } } - this[kDefaultTrailers] = trailers - return this - } + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). 
+ failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. - /** - * Set reply content length header for replies on the interceptor - */ - replyContentLength () { - this[kContentLength] = true - return this - } -} + const frame = new WebsocketFrameSend() -module.exports.MockInterceptor = MockInterceptor -module.exports.MockScope = MockScope + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket -/***/ }), + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) -/***/ 6193: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } -"use strict"; + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) -const { promisify } = __nccwpck_require__(3837) -const Pool = __nccwpck_require__(4634) -const { buildMockDispatch } = __nccwpck_require__(9323) -const { - kDispatches, - kMockAgent, - kClose, - kOriginalClose, - kOrigin, - kOriginalDispatch, - kConnected -} = __nccwpck_require__(4347) -const { MockInterceptor } = __nccwpck_require__(410) -const Symbols = __nccwpck_require__(2785) -const { InvalidArgumentError } = __nccwpck_require__(8045) + data = webidl.converters.WebSocketSendData(data) -/** - * MockPool provides an API that extends the Pool to influence the mockDispatches. - */ -class MockPool extends Pool { - constructor (origin, opts) { - super(origin, opts) + // 1. If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. 
+ if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } - if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { - throw new InvalidArgumentError('Argument opts.agent must implement Agent') + // 2. Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + + if (!isEstablished(this) || isClosing(this)) { + return } - this[kMockAgent] = opts.agent - this[kOrigin] = origin - this[kDispatches] = [] - this[kConnected] = 1 - this[kOriginalDispatch] = this.dispatch - this[kOriginalClose] = this.close.bind(this) + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket - this.dispatch = buildMockDispatch.call(this) - this.close = this[kClose] - } + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. - get [Symbols.kConnected] () { - return this[kConnected] - } + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) - /** - * Sets up the base interceptor for mocking replies from undici. - */ - intercept (opts) { - return new MockInterceptor(opts, this[kDispatches]) - } + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. 
The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. - async [kClose] () { - await promisify(this[kOriginalClose])() - this[kConnected] = 0 - this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) - } -} + const ab = Buffer.from(data, data.byteOffset, data.byteLength) -module.exports = MockPool + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. -/***/ }), + const frame = new WebsocketFrameSend() -/***/ 4347: -/***/ ((module) => { + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) -"use strict"; + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + get readyState () { + webidl.brandCheck(this, WebSocket) -module.exports = { - kAgent: Symbol('agent'), - kOptions: Symbol('options'), - kFactory: Symbol('factory'), - kDispatches: Symbol('dispatches'), - kDispatchKey: Symbol('dispatch key'), - kDefaultHeaders: Symbol('default headers'), - kDefaultTrailers: Symbol('default trailers'), - kContentLength: Symbol('content length'), - kMockAgent: Symbol('mock agent'), - kMockAgentSet: Symbol('mock agent set'), - kMockAgentGet: Symbol('mock agent get'), - kMockDispatch: Symbol('mock dispatch'), - kClose: Symbol('close'), - kOriginalClose: Symbol('original agent close'), - kOrigin: Symbol('origin'), - kIsMockActive: Symbol('is mock active'), - kNetConnect: Symbol('net connect'), - kGetNetConnect: Symbol('get net connect'), - kConnected: Symbol('connected') -} + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) -/***/ }), + return this.#bufferedAmount + } -/***/ 9323: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + get url () { + webidl.brandCheck(this, WebSocket) -"use strict"; + // The url getter steps are to return this's url, serialized. 
+ return URLSerializer(this[kWebSocketURL]) + } + get extensions () { + webidl.brandCheck(this, WebSocket) -const { MockNotMatchedError } = __nccwpck_require__(888) -const { - kDispatches, - kMockAgent, - kOriginalDispatch, - kOrigin, - kGetNetConnect -} = __nccwpck_require__(4347) -const { buildURL, nop } = __nccwpck_require__(3983) -const { STATUS_CODES } = __nccwpck_require__(3685) -const { - types: { - isPromise + return this.#extensions } -} = __nccwpck_require__(3837) -function matchValue (match, value) { - if (typeof match === 'string') { - return match === value - } - if (match instanceof RegExp) { - return match.test(value) + get protocol () { + webidl.brandCheck(this, WebSocket) + + return this.#protocol } - if (typeof match === 'function') { - return match(value) === true + + get onopen () { + webidl.brandCheck(this, WebSocket) + + return this.#events.open } - return false -} -function lowerCaseEntries (headers) { - return Object.fromEntries( - Object.entries(headers).map(([headerName, headerValue]) => { - return [headerName.toLocaleLowerCase(), headerValue] - }) - ) -} + set onopen (fn) { + webidl.brandCheck(this, WebSocket) -/** - * @param {import('../../index').Headers|string[]|Record} headers - * @param {string} key - */ -function getHeaderByName (headers, key) { - if (Array.isArray(headers)) { - for (let i = 0; i < headers.length; i += 2) { - if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { - return headers[i + 1] - } + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) } - return undefined - } else if (typeof headers.get === 'function') { - return headers.get(key) - } else { - return lowerCaseEntries(headers)[key.toLocaleLowerCase()] + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null + } } -} -/** @param {string[]} headers */ -function buildHeadersFromArray (headers) { // fetch HeadersList - const clone = headers.slice() - const entries = [] - for (let index = 0; index < clone.length; index += 2) { - entries.push([clone[index], clone[index + 1]]) - } - return Object.fromEntries(entries) -} + get onerror () { + webidl.brandCheck(this, WebSocket) -function matchHeaders (mockDispatch, headers) { - if (typeof mockDispatch.headers === 'function') { - if (Array.isArray(headers)) { // fetch HeadersList - headers = buildHeadersFromArray(headers) - } - return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) - } - if (typeof mockDispatch.headers === 'undefined') { - return true - } - if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { - return false + return this.#events.error } - for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { - const headerValue = getHeaderByName(headers, matchHeaderName) + set onerror (fn) { + webidl.brandCheck(this, WebSocket) - if (!matchValue(matchHeaderValue, headerValue)) { - return false + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) } - } - return true -} -function safeUrl (path) { - if (typeof path !== 'string') { - return path + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null + } } - const pathSegments = path.split('?') + get onclose () { + webidl.brandCheck(this, WebSocket) - if (pathSegments.length !== 2) { - return path + return this.#events.close } - const qp = new URLSearchParams(pathSegments.pop()) - qp.sort() - return [...pathSegments, qp.toString()].join('?') -} + set onclose (fn) { + webidl.brandCheck(this, WebSocket) -function matchKey (mockDispatch, { path, method, body, headers }) { - const pathMatch = matchValue(mockDispatch.path, path) - const methodMatch = matchValue(mockDispatch.method, method) - const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true - const headersMatch = matchHeaders(mockDispatch, headers) - return pathMatch && methodMatch && bodyMatch && headersMatch -} + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } -function getResponseData (data) { - if (Buffer.isBuffer(data)) { - return data - } else if (typeof data === 'object') { - return JSON.stringify(data) - } else { - return data.toString() + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } } -} -function getMockDispatch (mockDispatches, key) { - const basePath = key.query ? buildURL(key.path, key.query) : key.path - const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + get onmessage () { + webidl.brandCheck(this, WebSocket) - // Match path - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) - if (matchedMockDispatches.length === 0) { - throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) + return this.#events.message } - // Match method - matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) - if (matchedMockDispatches.length === 0) { - throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) - } + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) - // Match body - matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? 
matchValue(body, key.body) : true) - if (matchedMockDispatches.length === 0) { - throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) - } + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) + } - // Match headers - matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) - if (matchedMockDispatches.length === 0) { - throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null + } } - return matchedMockDispatches[0] -} - -function addMockDispatch (mockDispatches, key, data) { - const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } - const replyData = typeof data === 'function' ? { callback: data } : { ...data } - const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } - mockDispatches.push(newMockDispatch) - return newMockDispatch -} + get binaryType () { + webidl.brandCheck(this, WebSocket) -function deleteMockDispatch (mockDispatches, key) { - const index = mockDispatches.findIndex(dispatch => { - if (!dispatch.consumed) { - return false - } - return matchKey(dispatch, key) - }) - if (index !== -1) { - mockDispatches.splice(index, 1) + return this[kBinaryType] } -} -function buildKey (opts) { - const { path, method, body, headers, query } = opts - return { - path, - method, - body, - headers, - query + set binaryType (type) { + webidl.brandCheck(this, WebSocket) + + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } } -} -function generateKeyValues (data) { - return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ - ...keyValuePairs, - Buffer.from(`${key}`), - Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) - ], []) -} + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response -/** - * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status - * @param {number} statusCode - */ -function getStatusText (statusCode) { - return STATUS_CODES[statusCode] || 'unknown' -} + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) -async function getResponse (body) { - const buffers = [] - for await (const data of body) { - buffers.push(data) - } - return Buffer.concat(buffers).toString('utf8') -} + response.socket.ws = this + this[kByteParser] = parser -/** - * Mock dispatch function used to simulate undici dispatches - */ -function mockDispatch (opts, handler) { - // Get mock dispatch from built key - const key = buildKey(opts) - const mockDispatch = getMockDispatch(this[kDispatches], key) + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN - mockDispatch.timesInvoked++ + // 2. Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. 
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') - // Here's where we resolve a callback if a callback is present for the dispatch data. - if (mockDispatch.data.callback) { - mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } - } + if (extensions !== null) { + this.#extensions = extensions + } - // Parse mockDispatch data - const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch - const { timesInvoked, times } = mockDispatch + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') - // If it's used up and not persistent, mark as consumed - mockDispatch.consumed = !persist && timesInvoked >= times - mockDispatch.pending = timesInvoked < times + if (protocol !== null) { + this.#protocol = protocol + } - // If specified, trigger dispatch error - if (error !== null) { - deleteMockDispatch(this[kDispatches], key) - handler.onError(error) - return true + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) } +} - // Handle the request with a delay if necessary - if (typeof delay === 'number' && delay > 0) { - setTimeout(() => { - handleReply(this[kDispatches]) - }, delay) - } else { - handleReply(this[kDispatches]) - } +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED - function handleReply (mockDispatches, _data = data) { - // fetch's HeadersList is a 1D string array - const optsHeaders = Array.isArray(opts.headers) - ? buildHeadersFromArray(opts.headers) - : opts.headers - const body = typeof _data === 'function' - ? _data({ ...opts, headers: optsHeaders }) - : _data +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) - // util.types.isPromise is likely needed for jest. - if (isPromise(body)) { - // If handleReply is asynchronous, throwing an error - // in the callback will reject the promise, rather than - // synchronously throw the error, which breaks some tests. - // Rather, we wait for the callback to resolve if it is a - // promise, and then re-run handleReply with the new body. 
- body.then((newData) => handleReply(mockDispatches, newData)) - return - } +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) - const responseData = getResponseData(body) - const responseHeaders = generateKeyValues(headers) - const responseTrailers = generateKeyValues(trailers) +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) - handler.abort = nop - handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) - handler.onData(Buffer.from(responseData)) - handler.onComplete(responseTrailers) - deleteMockDispatch(mockDispatches, key) +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) } - function resume () {} - - return true + return webidl.converters.DOMString(V) } -function buildMockDispatch () { - const agent = this[kMockAgent] - const origin = this[kOrigin] - const originalDispatch = this[kOriginalDispatch] - - return function dispatch (opts, handler) { - if (agent.isMockActive) { - try { - mockDispatch.call(this, opts, handler) - } catch (error) { - if (error instanceof MockNotMatchedError) { - const netConnect = agent[kGetNetConnect]() - if (netConnect === false) { - throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) - } - if (checkNetConnect(netConnect, origin)) { - originalDispatch.call(this, opts, handler) - } else { - throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) - } - } else { - throw error - } - } - } else { - originalDispatch.call(this, opts, handler) +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) => V, + get defaultValue () { + return getGlobalDispatcher() } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) } -} +]) -function checkNetConnect (netConnect, origin) { - const url = new URL(origin) - if (netConnect === true) { - return true - } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { - return true +webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) } - return false + + return { protocols: webidl.converters['DOMString or sequence'](V) } } -function buildMockOptions (opts) { - if (opts) { - const { agent, ...mockOptions } = opts - return mockOptions +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) + } } + + return webidl.converters.USVString(V) } module.exports = { - getResponseData, - getMockDispatch, - addMockDispatch, - deleteMockDispatch, - buildKey, - generateKeyValues, - matchValue, - getResponse, - getStatusText, - mockDispatch, - 
buildMockDispatch, - checkNetConnect, - buildMockOptions, - getHeaderByName + WebSocket } /***/ }), -/***/ 6823: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 45030: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -const { Transform } = __nccwpck_require__(2781) -const { Console } = __nccwpck_require__(6206) +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + + if (typeof process === "object" && "version" in process) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + + return ""; +} + +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 65278: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** - * Gets the output of `console.table(…)` as a string. + * For Node.js, simply re-export the core `util.deprecate` function. */ -module.exports = class PendingInterceptorsFormatter { - constructor ({ disableColors } = {}) { - this.transform = new Transform({ - transform (chunk, _enc, cb) { - cb(null, chunk) - } - }) - this.logger = new Console({ - stdout: this.transform, - inspectOptions: { - colors: !disableColors && !process.env.CI - } - }) - } +module.exports = __nccwpck_require__(73837).deprecate; - format (pendingInterceptors) { - const withPrettyHeaders = pendingInterceptors.map( - ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ - Method: method, - Origin: origin, - Path: path, - 'Status code': statusCode, - Persistent: persist ? '✅' : '❌', - Invocations: timesInvoked, - Remaining: persist ? Infinity : times - timesInvoked - })) - this.logger.table(withPrettyHeaders) - return this.transform.read().toString() +/***/ }), + +/***/ 75840: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; } -} +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +var _v = _interopRequireDefault(__nccwpck_require__(78628)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(86409)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(85122)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(79120)); + +var _nil = _interopRequireDefault(__nccwpck_require__(25332)); + +var _version = _interopRequireDefault(__nccwpck_require__(32414)); + +var _validate = 
_interopRequireDefault(__nccwpck_require__(66900)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(18950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(62746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 8891: -/***/ ((module) => { +/***/ 4569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const singulars = { - pronoun: 'it', - is: 'is', - was: 'was', - this: 'this' -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -const plurals = { - pronoun: 'they', - is: 'are', - was: 'were', - this: 'these' -} +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); -module.exports = class Pluralizer { - constructor (singular, plural) { - this.singular = singular - this.plural = plural - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - pluralize (count) { - const one = count === 1 - const keys = one ? singulars : plurals - const noun = one ? this.singular : this.plural - return { ...keys, count, noun } +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } + + return _crypto.default.createHash('md5').update(bytes).digest(); } +var _default = md5; +exports["default"] = _default; /***/ }), -/***/ 8266: -/***/ ((module) => { +/***/ 82054: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* eslint-disable */ +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -// Extracted from node/lib/internal/fixed_queue.js +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); -// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. -const kSize = 2048; -const kMask = kSize - 1; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -// The FixedQueue is implemented as a singly-linked list of fixed-size -// circular buffers. It looks something like this: -// -// head tail -// | | -// v v -// +-----------+ <-----\ +-----------+ <------\ +-----------+ -// | [null] | \----- | next | \------- | next | -// +-----------+ +-----------+ +-----------+ -// | item | <-- bottom | item | <-- bottom | [empty] | -// | item | | item | | [empty] | -// | item | | item | | [empty] | -// | item | | item | | [empty] | -// | item | | item | bottom --> | item | -// | item | | item | | item | -// | ... | | ... | | ... | -// | item | | item | | item | -// | item | | item | | item | -// | [empty] | <-- top | item | | item | -// | [empty] | | item | | item | -// | [empty] | | [empty] | <-- top top --> | [empty] | -// +-----------+ +-----------+ +-----------+ -// -// Or, if there is only one circular buffer, it looks something -// like either of these: -// -// head tail head tail -// | | | | -// v v v v -// +-----------+ +-----------+ -// | [null] | | [null] | -// +-----------+ +-----------+ -// | [empty] | | item | -// | [empty] | | item | -// | item | <-- bottom top --> | [empty] | -// | item | | [empty] | -// | [empty] | <-- top bottom --> | item | -// | [empty] | | item | -// +-----------+ +-----------+ -// -// Adding a value means moving `top` forward by one, removing means -// moving `bottom` forward by one. After reaching the end, the queue -// wraps around. 
-// -// When `top === bottom` the current queue is empty and when -// `top + 1 === bottom` it's full. This wastes a single space of storage -// but allows much quicker checks. +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; -class FixedCircularBuffer { - constructor() { - this.bottom = 0; - this.top = 0; - this.list = new Array(kSize); - this.next = null; - } +/***/ }), - isEmpty() { - return this.top === this.bottom; - } +/***/ 25332: +/***/ ((__unused_webpack_module, exports) => { - isFull() { - return ((this.top + 1) & kMask) === this.bottom; - } +"use strict"; - push(data) { - this.list[this.top] = data; - this.top = (this.top + 1) & kMask; - } - shift() { - const nextItem = this.list[this.bottom]; - if (nextItem === undefined) - return null; - this.list[this.bottom] = undefined; - this.bottom = (this.bottom + 1) & kMask; - return nextItem; - } -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; -module.exports = class FixedQueue { - constructor() { - this.head = this.tail = new FixedCircularBuffer(); - } +/***/ }), - isEmpty() { - return this.head.isEmpty(); - } +/***/ 62746: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - push(data) { - if (this.head.isFull()) { - // Head is full: Creates a new queue, sets the old queue's `.next` to it, - // and sets it as the new main queue. - this.head = this.head.next = new FixedCircularBuffer(); - } - this.head.push(data); - } +"use strict"; - shift() { - const tail = this.tail; - const next = tail.shift(); - if (tail.isEmpty() && tail.next !== null) { - // If there is another queue, it forms the new tail. - this.tail = tail.next; - } - return next; + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(66900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); } -}; + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; /***/ }), -/***/ 3198: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 40814: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -const DispatcherBase = __nccwpck_require__(4839) -const FixedQueue = __nccwpck_require__(8266) -const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(2785) -const PoolStats = __nccwpck_require__(9689) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; -const kClients = Symbol('clients') -const kNeedDrain = Symbol('needDrain') -const kQueue = Symbol('queue') -const kClosedResolve = Symbol('closed resolve') -const kOnDrain = Symbol('onDrain') -const kOnConnect = Symbol('onConnect') -const kOnDisconnect = Symbol('onDisconnect') -const kOnConnectionError = Symbol('onConnectionError') -const kGetDispatcher = Symbol('get dispatcher') -const kAddClient = Symbol('add client') -const kRemoveClient = Symbol('remove client') -const kStats = Symbol('stats') +/***/ }), -class PoolBase extends DispatcherBase { - constructor () { - super() +/***/ 50807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this[kQueue] = new FixedQueue() - this[kClients] = [] - this[kQueued] = 0 +"use strict"; - const pool = this - this[kOnDrain] = function onDrain (origin, targets) { - const queue = pool[kQueue] +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; - let needDrain = false +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - while (!needDrain) { - const item = queue.shift() - if (!item) { - break - } - pool[kQueued]-- - needDrain = !this.dispatch(item.opts, item.handler) - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - this[kNeedDrain] = needDrain +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - if (!this[kNeedDrain] && pool[kNeedDrain]) { - pool[kNeedDrain] = false - pool.emit('drain', origin, [pool, ...targets]) - } +let poolPtr = rnds8Pool.length; - if (pool[kClosedResolve] && queue.isEmpty()) { - Promise - .all(pool[kClients].map(c => c.close())) - .then(pool[kClosedResolve]) - } - } +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); - this[kOnConnect] = (origin, targets) => { - pool.emit('connect', origin, [pool, ...targets]) - } + poolPtr = 0; + } - this[kOnDisconnect] = (origin, targets, err) => { - pool.emit('disconnect', origin, [pool, ...targets], err) - } + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} - this[kOnConnectionError] = (origin, targets, err) => { - pool.emit('connectionError', origin, [pool, ...targets], err) - } +/***/ }), - this[kStats] = new PoolStats(this) - } +/***/ 85274: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - get [kBusy] () { - return this[kNeedDrain] - } +"use strict"; - get [kConnected] () { - return this[kClients].filter(client => client[kConnected]).length - } - get [kFree] () { - return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - get [kPending] () { - let ret = this[kQueued] - for (const { [kPending]: pending } of this[kClients]) { - ret += pending - } - return ret - } +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - get [kRunning] () { - let ret = 0 - for (const { [kRunning]: running } of this[kClients]) { - ret += running - } - return ret - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - get [kSize] () { - let ret = this[kQueued] - for (const { [kSize]: size } of this[kClients]) { - ret += size - } - return ret +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); } - get stats () { - return this[kStats] - } + return _crypto.default.createHash('sha1').update(bytes).digest(); +} - async [kClose] () { - if (this[kQueue].isEmpty()) { - return Promise.all(this[kClients].map(c => c.close())) - } else { - return new Promise((resolve) => { - this[kClosedResolve] = resolve - }) - } - } +var _default = sha1; +exports["default"] = _default; - async [kDestroy] (err) { - while (true) { - const item = this[kQueue].shift() - if (!item) { - break - } - item.handler.onError(err) - } +/***/ }), - return Promise.all(this[kClients].map(c => c.destroy(err))) - } +/***/ 18950: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; - [kDispatch] (opts, handler) { - const dispatcher = this[kGetDispatcher]() - if (!dispatcher) { - this[kNeedDrain] = true - this[kQueue].push({ opts, handler }) - this[kQueued]++ - } else if (!dispatcher.dispatch(opts, handler)) { - dispatcher[kNeedDrain] = true - this[kNeedDrain] = !this[kGetDispatcher]() - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; - return !this[kNeedDrain] - } +var _validate = _interopRequireDefault(__nccwpck_require__(66900)); - [kAddClient] (client) { - client - .on('drain', this[kOnDrain]) - .on('connect', this[kOnConnect]) - .on('disconnect', this[kOnDisconnect]) - .on('connectionError', this[kOnConnectionError]) +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - this[kClients].push(client) +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; - if (this[kNeedDrain]) { - process.nextTick(() => { - if (this[kNeedDrain]) { - this[kOnDrain](client[kUrl], [this, client]) - } - }) - } +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} - return this - } +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} - [kRemoveClient] (client) { - client.close(() => { - const idx = this[kClients].indexOf(client) - if (idx !== -1) { - this[kClients].splice(idx, 1) - } - }) +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields - this[kNeedDrain] = this[kClients].some(dispatcher => ( - !dispatcher[kNeedDrain] && - dispatcher.closed !== true && - dispatcher.destroyed !== true - )) + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); } -} -module.exports = { - PoolBase, - kClients, - kNeedDrain, - kAddClient, - kRemoveClient, - kGetDispatcher + return uuid; } +var _default = stringify; +exports["default"] = _default; /***/ }), -/***/ 9689: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 78628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(2785) -const kPool = Symbol('pool') +"use strict"; -class PoolStats { - constructor (pool) { - this[kPool] = pool - } - get connected () { - return this[kPool][kConnected] - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - get free () { - return this[kPool][kFree] - } +var _rng = _interopRequireDefault(__nccwpck_require__(50807)); - get pending () { - return this[kPool][kPending] - } +var _stringify = __nccwpck_require__(18950); - get queued () { - return this[kPool][kQueued] - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - get running () { - return this[kPool][kRunning] - } +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; - get size () { - return this[kPool][kSize] - } -} +let _clockseq; // Previous uuid creation time -module.exports = PoolStats +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details -/***/ }), +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 -/***/ 4634: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); -"use strict"; + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. 
-const { - PoolBase, - kClients, - kNeedDrain, - kAddClient, - kGetDispatcher -} = __nccwpck_require__(3198) -const Client = __nccwpck_require__(3598) -const { - InvalidArgumentError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { kUrl, kInterceptors } = __nccwpck_require__(2785) -const buildConnector = __nccwpck_require__(2067) -const kOptions = Symbol('options') -const kConnections = Symbol('connections') -const kFactory = Symbol('factory') + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock -function defaultFactory (origin, opts) { - return new Client(origin, opts) -} + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) -class Pool extends PoolBase { - constructor (origin, { - connections, - factory = defaultFactory, - connect, - connectTimeout, - tls, - maxCachedSessions, - socketPath, - autoSelectFamily, - autoSelectFamilyAttemptTimeout, - allowH2, - ...options - } = {}) { - super() + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - if (connections != null && (!Number.isFinite(connections) || connections < 0)) { - throw new InvalidArgumentError('invalid connections') - } + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval - if (typeof factory !== 'function') { - throw new InvalidArgumentError('factory must be a function.') - } - if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { - throw new InvalidArgumentError('connect must be a function or an object') - } + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested - if (typeof connect !== 'function') { - connect = buildConnector({ - ...tls, - maxCachedSessions, - allowH2, - socketPath, - timeout: connectTimeout, - ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), - ...connect - }) - } - this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) - ? options.interceptors.Pool - : [] - this[kConnections] = connections || null - this[kUrl] = util.parseOrigin(origin) - this[kOptions] = { ...util.deepClone(options), connect, allowH2 } - this[kOptions].interceptors = options.interceptors - ? 
{ ...options.interceptors } - : undefined - this[kFactory] = factory + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } - [kGetDispatcher] () { - let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - if (dispatcher) { - return dispatcher - } + msecs += 12219292800000; // `time_low` - if (!this[kConnections] || this[kClients].length < this[kConnections]) { - dispatcher = this[kFactory](this[kUrl], this[kOptions]) - this[kAddClient](dispatcher) - } + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` - return dispatcher + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; } -} -module.exports = Pool + return buf || (0, _stringify.unsafeStringify)(b); +} +var _default = v1; +exports["default"] = _default; /***/ }), -/***/ 7858: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 86409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(2785) -const { URL } = __nccwpck_require__(7310) -const Agent = __nccwpck_require__(7890) -const Pool = __nccwpck_require__(4634) -const DispatcherBase = __nccwpck_require__(4839) -const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(8045) -const buildConnector = __nccwpck_require__(2067) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -const kAgent = Symbol('proxy agent') -const kClient = Symbol('proxy client') -const kProxyHeaders = Symbol('proxy headers') -const kRequestTls = Symbol('request tls settings') -const kProxyTls = Symbol('proxy tls settings') -const kConnectEndpoint = Symbol('connect endpoint function') +var _v = _interopRequireDefault(__nccwpck_require__(65998)); -function defaultProtocolPort (protocol) { - return protocol === 'https:' ? 443 : 80 -} +var _md = _interopRequireDefault(__nccwpck_require__(4569)); -function buildProxyOptions (opts) { - if (typeof opts === 'string') { - opts = { uri: opts } - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - if (!opts || !opts.uri) { - throw new InvalidArgumentError('Proxy opts.uri is mandatory') - } +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; - return { - uri: opts.uri, - protocol: opts.protocol || 'https' - } -} +/***/ }), -function defaultFactory (origin, opts) { - return new Pool(origin, opts) -} +/***/ 65998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -class ProxyAgent extends DispatcherBase { - constructor (opts) { - super(opts) - this[kProxy] = buildProxyOptions(opts) - this[kAgent] = new Agent(opts) - this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) - ? 
opts.interceptors.ProxyAgent - : [] +"use strict"; - if (typeof opts === 'string') { - opts = { uri: opts } - } - if (!opts || !opts.uri) { - throw new InvalidArgumentError('Proxy opts.uri is mandatory') - } +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; - const { clientFactory = defaultFactory } = opts +var _stringify = __nccwpck_require__(18950); - if (typeof clientFactory !== 'function') { - throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') - } +var _parse = _interopRequireDefault(__nccwpck_require__(62746)); - this[kRequestTls] = opts.requestTls - this[kProxyTls] = opts.proxyTls - this[kProxyHeaders] = opts.headers || {} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - const resolvedUrl = new URL(opts.uri) - const { origin, port, host, username, password } = resolvedUrl +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape - if (opts.auth && opts.token) { - throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') - } else if (opts.auth) { - /* @deprecated in favour of opts.token */ - this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` - } else if (opts.token) { - this[kProxyHeaders]['proxy-authorization'] = opts.token - } else if (username && password) { - this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` - } + const bytes = []; - const connect = buildConnector({ ...opts.proxyTls }) - this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) - this[kClient] = clientFactory(resolvedUrl, { connect }) - this[kAgent] = new Agent({ - ...opts, - connect: async (opts, callback) => { - let requestedHost = opts.host - if (!opts.port) { - requestedHost += `:${defaultProtocolPort(opts.protocol)}` - } - try { - const { socket, statusCode } = await this[kClient].connect({ - origin, - port, - path: requestedHost, - signal: opts.signal, - headers: { - ...this[kProxyHeaders], - host - } - }) - if (statusCode !== 200) { - socket.on('error', () => {}).destroy() - callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) - } - if (opts.protocol !== 'https:') { - callback(null, socket) - return - } - let servername - if (this[kRequestTls]) { - servername = this[kRequestTls].servername - } else { - servername = opts.servername - } - this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) - } catch (err) { - callback(err) - } - } - }) + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); } - dispatch (opts, handler) { - const { host } = new URL(opts.origin) - const headers = buildHeaders(opts.headers) - throwIfProxyAuthIsSent(headers) - return this[kAgent].dispatch( - { - ...opts, - headers: { - ...headers, - host - } - }, - handler - ) - } + return bytes; +} - async [kClose] () { - await this[kAgent].close() - await this[kClient].close() - } +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; - async [kDestroy] () { - await this[kAgent].destroy() - await this[kClient].destroy() - } -} +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; -/** - * @param {string[] | Record} headers - * @returns {Record} - */ -function 
buildHeaders (headers) { - // When using undici.fetch, the headers list is stored - // as an array. - if (Array.isArray(headers)) { - /** @type {Record} */ - const headersPair = {} + if (typeof value === 'string') { + value = stringToBytes(value); + } - for (let i = 0; i < headers.length; i += 2) { - headersPair[headers[i]] = headers[i + 1] + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); } - return headersPair - } + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` - return headers -} -/** - * @param {Record} headers - * - * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers - * Nevertheless, it was changed and to avoid a security vulnerability by end users - * this check was created. - * It should be removed in the next major version for performance reasons - */ -function throwIfProxyAuthIsSent (headers) { - const existProxyAuth = headers && Object.keys(headers) - .find((key) => key.toLowerCase() === 'proxy-authorization') - if (existProxyAuth) { - throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') - } -} + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; -module.exports = ProxyAgent + if (buf) { + offset = offset || 0; + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } -/***/ }), + return buf; + } -/***/ 9459: -/***/ ((module) => { + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) -"use strict"; + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support -let fastNow = Date.now() -let fastNowTimeout -const fastTimers = [] + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} -function onTimeout () { - fastNow = Date.now() +/***/ }), - let len = fastTimers.length - let idx = 0 - while (idx < len) { - const timer = fastTimers[idx] +/***/ 85122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (timer.state === 0) { - timer.state = fastNow + timer.delay - } else if (timer.state > 0 && fastNow >= timer.state) { - timer.state = -1 - timer.callback(timer.opaque) - } +"use strict"; - if (timer.state === -1) { - timer.state = -2 - if (idx !== len - 1) { - fastTimers[idx] = fastTimers.pop() - } else { - fastTimers.pop() - } - len -= 1 - } else { - idx += 1 - } - } - if (fastTimers.length > 0) { - refreshTimeout() - } -} +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(82054)); -function refreshTimeout () { - if (fastNowTimeout && fastNowTimeout.refresh) { - fastNowTimeout.refresh() - } else { - clearTimeout(fastNowTimeout) - fastNowTimeout = setTimeout(onTimeout, 1e3) - if (fastNowTimeout.unref) { - fastNowTimeout.unref() - } - } -} +var _rng = _interopRequireDefault(__nccwpck_require__(50807)); -class Timeout { - constructor (callback, delay, opaque) { - this.callback = callback - this.delay = delay - this.opaque = 
opaque +var _stringify = __nccwpck_require__(18950); - // -2 not in timer list - // -1 in timer list but inactive - // 0 in timer list waiting for time - // > 0 in timer list waiting for time to expire - this.state = -2 +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - this.refresh() +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); } - refresh () { - if (this.state === -2) { - fastTimers.push(this) - if (!fastNowTimeout || fastTimers.length === 1) { - refreshTimeout() - } - } + options = options || {}; - this.state = 0 - } + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - clear () { - this.state = -1 - } -} -module.exports = { - setTimeout (callback, delay, opaque) { - return delay < 1e3 - ? setTimeout(callback, delay, opaque) - : new Timeout(callback, delay, opaque) - }, - clearTimeout (timeout) { - if (timeout instanceof Timeout) { - timeout.clear() - } else { - clearTimeout(timeout) + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; } + + return buf; } + + return (0, _stringify.unsafeStringify)(rnds); } +var _default = v4; +exports["default"] = _default; /***/ }), -/***/ 5354: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 79120: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const diagnosticsChannel = __nccwpck_require__(7643) -const { uid, states } = __nccwpck_require__(9188) -const { - kReadyState, - kSentClose, - kByteParser, - kReceivedClose -} = __nccwpck_require__(7578) -const { fireEvent, failWebsocketConnection } = __nccwpck_require__(5515) -const { CloseEvent } = __nccwpck_require__(2611) -const { makeRequest } = __nccwpck_require__(8359) -const { fetching } = __nccwpck_require__(4881) -const { Headers } = __nccwpck_require__(554) -const { getGlobalDispatcher } = __nccwpck_require__(1892) -const { kHeadersList } = __nccwpck_require__(2785) +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -const channels = {} -channels.open = diagnosticsChannel.channel('undici:websocket:open') -channels.close = diagnosticsChannel.channel('undici:websocket:close') -channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') +var _v = _interopRequireDefault(__nccwpck_require__(65998)); -/** @type {import('crypto')} */ -let crypto -try { - crypto = __nccwpck_require__(6113) -} catch { +var _sha = _interopRequireDefault(__nccwpck_require__(85274)); -} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -/** - * @see https://websockets.spec.whatwg.org/#concept-websocket-establish - * @param {URL} url - * @param {string|string[]} protocols - * @param {import('./websocket').WebSocket} ws - * @param {(response: any) => void} onEstablish - * @param {Partial} options - */ -function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { - // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s - // scheme is "ws", and to "https" otherwise. - const requestURL = url +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; - requestURL.protocol = url.protocol === 'ws:' ? 
'http:' : 'https:' +/***/ }), - // 2. Let request be a new request, whose URL is requestURL, client is client, - // service-workers mode is "none", referrer is "no-referrer", mode is - // "websocket", credentials mode is "include", cache mode is "no-store" , - // and redirect mode is "error". - const request = makeRequest({ - urlList: [requestURL], - serviceWorkers: 'none', - referrer: 'no-referrer', - mode: 'websocket', - credentials: 'include', - cache: 'no-store', - redirect: 'error' - }) +/***/ 66900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Note: undici extension, allow setting custom headers. - if (options.headers) { - const headersList = new Headers(options.headers)[kHeadersList] +"use strict"; - request.headersList = headersList - } - // 3. Append (`Upgrade`, `websocket`) to request’s header list. - // 4. Append (`Connection`, `Upgrade`) to request’s header list. - // Note: both of these are handled by undici currently. - // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // 5. Let keyValue be a nonce consisting of a randomly selected - // 16-byte value that has been forgiving-base64-encoded and - // isomorphic encoded. - const keyValue = crypto.randomBytes(16).toString('base64') +var _regex = _interopRequireDefault(__nccwpck_require__(40814)); - // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s - // header list. - request.headersList.append('sec-websocket-key', keyValue) +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s - // header list. - request.headersList.append('sec-websocket-version', '13') +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} - // 8. For each protocol in protocols, combine - // (`Sec-WebSocket-Protocol`, protocol) in request’s header - // list. - for (const protocol of protocols) { - request.headersList.append('sec-websocket-protocol', protocol) - } +var _default = validate; +exports["default"] = _default; - // 9. Let permessageDeflate be a user-agent defined - // "permessage-deflate" extension header value. - // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 - // TODO: enable once permessage-deflate is supported - const permessageDeflate = '' // 'permessage-deflate; 15' +/***/ }), - // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to - // request’s header list. - // request.headersList.append('sec-websocket-extensions', permessageDeflate) +/***/ 32414: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 11. Fetch request with useParallelQueue set to true, and - // processResponse given response being these steps: - const controller = fetching({ - request, - useParallelQueue: true, - dispatcher: options.dispatcher ?? getGlobalDispatcher(), - processResponse (response) { - // 1. If response is a network error or its status is not 101, - // fail the WebSocket connection. - if (response.type === 'error' || response.status !== 101) { - failWebsocketConnection(ws, 'Received network error or non-101 status code.') - return - } +"use strict"; - // 2. 
If protocols is not the empty list and extracting header - // list values given `Sec-WebSocket-Protocol` and response’s - // header list results in null, failure, or the empty byte - // sequence, then fail the WebSocket connection. - if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { - failWebsocketConnection(ws, 'Server did not respond with sent protocols.') - return - } - // 3. Follow the requirements stated step 2 to step 6, inclusive, - // of the last set of steps in section 4.1 of The WebSocket - // Protocol to validate response. This either results in fail - // the WebSocket connection or the WebSocket connection is - // established. +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; - // 2. If the response lacks an |Upgrade| header field or the |Upgrade| - // header field contains a value that is not an ASCII case- - // insensitive match for the value "websocket", the client MUST - // _Fail the WebSocket Connection_. - if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { - failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') - return - } +var _validate = _interopRequireDefault(__nccwpck_require__(66900)); - // 3. If the response lacks a |Connection| header field or the - // |Connection| header field doesn't contain a token that is an - // ASCII case-insensitive match for the value "Upgrade", the client - // MUST _Fail the WebSocket Connection_. - if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { - failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') - return - } +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - // 4. If the response lacks a |Sec-WebSocket-Accept| header field or - // the |Sec-WebSocket-Accept| contains a value other than the - // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- - // Key| (as a string, not base64-decoded) with the string "258EAFA5- - // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and - // trailing whitespace, the client MUST _Fail the WebSocket - // Connection_. - const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') - const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') - if (secWSAccept !== digest) { - failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') - return - } +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } - // 5. If the response includes a |Sec-WebSocket-Extensions| header - // field and this header field indicates the use of an extension - // that was not present in the client's handshake (the server has - // indicated an extension not requested by the client), the client - // MUST _Fail the WebSocket Connection_. (The parsing of this - // header field to determine which extensions are requested is - // discussed in Section 9.1.) - const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + return parseInt(uuid.slice(14, 15), 16); +} - if (secExtension !== null && secExtension !== permessageDeflate) { - failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') - return - } +var _default = version; +exports["default"] = _default; - // 6. 
If the response includes a |Sec-WebSocket-Protocol| header field - // and this header field indicates the use of a subprotocol that was - // not present in the client's handshake (the server has indicated a - // subprotocol not requested by the client), the client MUST _Fail - // the WebSocket Connection_. - const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') +/***/ }), - if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { - failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') - return - } +/***/ 54886: +/***/ ((module) => { - response.socket.on('data', onSocketData) - response.socket.on('close', onSocketClose) - response.socket.on('error', onSocketError) +"use strict"; - if (channels.open.hasSubscribers) { - channels.open.publish({ - address: response.socket.address(), - protocol: secProtocol, - extensions: secExtension - }) - } - onEstablish(response) - } - }) +var conversions = {}; +module.exports = conversions; - return controller +function sign(x) { + return x < 0 ? -1 : 1; } -/** - * @param {Buffer} chunk - */ -function onSocketData (chunk) { - if (!this.ws[kByteParser].write(chunk)) { - this.pause() - } +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } } -/** - * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol - * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 - */ -function onSocketClose () { - const { ws } = this +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; - // If the TCP connection was closed after the - // WebSocket closing handshake was completed, the WebSocket connection - // is said to have been closed _cleanly_. - const wasClean = ws[kSentClose] && ws[kReceivedClose] + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); - let code = 1005 - let reason = '' + return function(V, opts) { + if (!opts) opts = {}; - const result = ws[kByteParser].closingInfo + let x = +V; - if (result) { - code = result.code ?? 1005 - reason = result.reason - } else if (!ws[kSentClose]) { - // If _The WebSocket - // Connection is Closed_ and no Close control frame was received by the - // endpoint (such as could occur if the underlying transport connection - // is lost), _The WebSocket Connection Close Code_ is considered to be - // 1006. - code = 1006 - } + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } - // 1. Change the ready state to CLOSED (3). - ws[kReadyState] = states.CLOSED + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } - // 2. If the user agent was required to fail the WebSocket - // connection, or if the WebSocket connection was closed - // after being flagged as full, fire an event named error - // at the WebSocket object. - // TODO + return x; + } - // 3. 
Fire an event named close at the WebSocket object, - // using CloseEvent, with the wasClean attribute - // initialized to true if the connection closed cleanly - // and false otherwise, the code attribute initialized to - // the WebSocket connection close code, and the reason - // attribute initialized to the result of applying UTF-8 - // decode without BOM to the WebSocket connection close - // reason. - fireEvent('close', ws, CloseEvent, { - wasClean, code, reason - }) + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); - if (channels.close.hasSubscribers) { - channels.close.publish({ - websocket: ws, - code, - reason - }) - } -} + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } -function onSocketError (error) { - const { ws } = this + if (!Number.isFinite(x) || x === 0) { + return 0; + } - ws[kReadyState] = states.CLOSING + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; - if (channels.socketError.hasSubscribers) { - channels.socketError.publish(error) - } + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } - this.destroy() + return x; + } } -module.exports = { - establishWebSocketConnection -} +conversions["void"] = function () { + return undefined; +}; +conversions["boolean"] = function (val) { + return !!val; +}; -/***/ }), +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); -/***/ 9188: -/***/ ((module) => { +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); -"use strict"; +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); -// This is a Globally Unique Identifier unique used -// to validate that the endpoint accepts websocket -// connections. 
-// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 -const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' +conversions["double"] = function (V) { + const x = +V; -/** @type {PropertyDescriptor} */ -const staticPropertyDescriptors = { - enumerable: true, - writable: false, - configurable: false -} + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } -const states = { - CONNECTING: 0, - OPEN: 1, - CLOSING: 2, - CLOSED: 3 -} + return x; +}; -const opcodes = { - CONTINUATION: 0x0, - TEXT: 0x1, - BINARY: 0x2, - CLOSE: 0x8, - PING: 0x9, - PONG: 0xA -} +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } -const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 + return U.join(''); +}; -const parserStates = { - INFO: 0, - PAYLOADLENGTH_16: 2, - PAYLOADLENGTH_64: 3, - READ_DATA: 4 -} +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } -const emptyBuffer = Buffer.allocUnsafe(0) + return V; +}; -module.exports = { - uid, - staticPropertyDescriptors, - states, - opcodes, - maxUnsigned16Bit, - parserStates, - emptyBuffer -} +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; /***/ }), -/***/ 2611: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 97537: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +const usm = __nccwpck_require__(2158); -const { webidl } = __nccwpck_require__(1744) -const { kEnumerableProperty } = __nccwpck_require__(3983) -const { MessagePort } = __nccwpck_require__(1267) +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; -/** - * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent - */ -class MessageEvent extends Event { - #eventInit + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } - constructor (type, eventInitDict = 
{}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } - type = webidl.converters.DOMString(type) - eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + this._url = parsedURL; - super(type, eventInitDict) + // TODO: query stuff + } - this.#eventInit = eventInitDict + get href() { + return usm.serializeURL(this._url); } - get data () { - webidl.brandCheck(this, MessageEvent) + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } - return this.#eventInit.data + this._url = parsedURL; } - get origin () { - webidl.brandCheck(this, MessageEvent) + get origin() { + return usm.serializeURLOrigin(this._url); + } - return this.#eventInit.origin + get protocol() { + return this._url.scheme + ":"; } - get lastEventId () { - webidl.brandCheck(this, MessageEvent) + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } - return this.#eventInit.lastEventId + get username() { + return this._url.username; } - get source () { - webidl.brandCheck(this, MessageEvent) + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } - return this.#eventInit.source + usm.setTheUsername(this._url, v); } - get ports () { - webidl.brandCheck(this, MessageEvent) + get password() { + return this._url.password; + } - if (!Object.isFrozen(this.#eventInit.ports)) { - Object.freeze(this.#eventInit.ports) + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; } - return this.#eventInit.ports + usm.setThePassword(this._url, v); } - initMessageEvent ( - type, - bubbles = false, - cancelable = false, - data = null, - origin = '', - lastEventId = '', - source = null, - ports = [] - ) { - webidl.brandCheck(this, MessageEvent) + get host() { + const url = this._url; - webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) + if (url.host === null) { + return ""; + } - return new MessageEvent(type, { - bubbles, cancelable, data, origin, lastEventId, source, ports - }) - } -} + if (url.port === null) { + return usm.serializeHost(url.host); + } -/** - * @see https://websockets.spec.whatwg.org/#the-closeevent-interface - */ -class CloseEvent extends Event { - #eventInit + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } - constructor (type, eventInitDict = {}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } - type = webidl.converters.DOMString(type) - eventInitDict = webidl.converters.CloseEventInit(eventInitDict) + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } - super(type, eventInitDict) + get hostname() { + if (this._url.host === null) { + return ""; + } - this.#eventInit = eventInitDict + return usm.serializeHost(this._url.host); } - get wasClean () { - webidl.brandCheck(this, CloseEvent) + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } - return this.#eventInit.wasClean + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); } - get code () { - webidl.brandCheck(this, CloseEvent) + get port() { + if (this._url.port === null) { + return ""; + } - return this.#eventInit.code + return usm.serializeInteger(this._url.port); } - get reason () { - 
webidl.brandCheck(this, CloseEvent) + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } - return this.#eventInit.reason + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } } -} -// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface -class ErrorEvent extends Event { - #eventInit + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } - constructor (type, eventInitDict) { - webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) + if (this._url.path.length === 0) { + return ""; + } - super(type, eventInitDict) + return "/" + this._url.path.join("/"); + } - type = webidl.converters.DOMString(type) - eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) + set pathname(v) { + if (this._url.cannotBeABaseURL) { + return; + } - this.#eventInit = eventInitDict + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); } - get message () { - webidl.brandCheck(this, ErrorEvent) + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } - return this.#eventInit.message + return "?" + this._url.query; } - get filename () { - webidl.brandCheck(this, ErrorEvent) + set search(v) { + // TODO: query stuff - return this.#eventInit.filename + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); } - get lineno () { - webidl.brandCheck(this, ErrorEvent) + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } - return this.#eventInit.lineno + return "#" + this._url.fragment; } - get colno () { - webidl.brandCheck(this, ErrorEvent) + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } - return this.#eventInit.colno + const input = v[0] === "#" ? 
v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); } - get error () { - webidl.brandCheck(this, ErrorEvent) + toJSON() { + return this.href; + } +}; - return this.#eventInit.error + +/***/ }), + +/***/ 63394: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const conversions = __nccwpck_require__(54886); +const utils = __nccwpck_require__(83185); +const Impl = __nccwpck_require__(97537); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); + } + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); } + + module.exports.setup(this, args); } -Object.defineProperties(MessageEvent.prototype, { - [Symbol.toStringTag]: { - value: 'MessageEvent', - configurable: true +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; }, - data: kEnumerableProperty, - origin: kEnumerableProperty, - lastEventId: kEnumerableProperty, - source: kEnumerableProperty, - ports: kEnumerableProperty, - initMessageEvent: kEnumerableProperty -}) + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); -Object.defineProperties(CloseEvent.prototype, { - [Symbol.toStringTag]: { - value: 'CloseEvent', - configurable: true +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; + +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; }, - reason: kEnumerableProperty, - code: kEnumerableProperty, - wasClean: kEnumerableProperty -}) + enumerable: true, + configurable: true +}); -Object.defineProperties(ErrorEvent.prototype, { - [Symbol.toStringTag]: { - value: 'ErrorEvent', - configurable: true +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; }, - message: kEnumerableProperty, - filename: kEnumerableProperty, - lineno: kEnumerableProperty, - colno: kEnumerableProperty, - error: kEnumerableProperty -}) + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); -webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) +Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.MessagePort -) +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + 
this[impl].password = V; + }, + enumerable: true, + configurable: true +}); -const eventInit = [ - { - key: 'bubbles', - converter: webidl.converters.boolean, - defaultValue: false +Object.defineProperty(URL.prototype, "host", { + get() { + return this[impl].host; }, - { - key: 'cancelable', - converter: webidl.converters.boolean, - defaultValue: false + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; }, - { - key: 'composed', - converter: webidl.converters.boolean, - defaultValue: false - } -] + enumerable: true, + configurable: true +}); -webidl.converters.MessageEventInit = webidl.dictionaryConverter([ - ...eventInit, - { - key: 'data', - converter: webidl.converters.any, - defaultValue: null +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; }, - { - key: 'origin', - converter: webidl.converters.USVString, - defaultValue: '' + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; }, - { - key: 'lastEventId', - converter: webidl.converters.DOMString, - defaultValue: '' + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; }, - { - key: 'source', - // Node doesn't implement WindowProxy or ServiceWorker, so the only - // valid value for source is a MessagePort. - converter: webidl.nullableConverter(webidl.converters.MessagePort), - defaultValue: null + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; }, - { - key: 'ports', - converter: webidl.converters['sequence'], - get defaultValue () { - return [] - } - } -]) + enumerable: true, + configurable: true +}); -webidl.converters.CloseEventInit = webidl.dictionaryConverter([ - ...eventInit, - { - key: 'wasClean', - converter: webidl.converters.boolean, - defaultValue: false +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; }, - { - key: 'code', - converter: webidl.converters['unsigned short'], - defaultValue: 0 + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; }, - { - key: 'reason', - converter: webidl.converters.USVString, - defaultValue: '' - } -]) + enumerable: true, + configurable: true +}); -webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ - ...eventInit, - { - key: 'message', - converter: webidl.converters.DOMString, - defaultValue: '' +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; }, - { - key: 'filename', - converter: webidl.converters.USVString, - defaultValue: '' + enumerable: true, + configurable: true +}); + + +module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; }, - { - key: 'lineno', - converter: webidl.converters['unsigned long'], - defaultValue: 0 + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; }, - { - key: 'colno', - converter: webidl.converters['unsigned long'], - defaultValue: 0 + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; }, - { - key: 
'error', - converter: webidl.converters.any + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } } -]) +}; -module.exports = { - MessageEvent, - CloseEvent, - ErrorEvent -} /***/ }), -/***/ 5444: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 28665: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const { maxUnsigned16Bit } = __nccwpck_require__(9188) +exports.URL = __nccwpck_require__(63394)["interface"]; +exports.serializeURL = __nccwpck_require__(2158).serializeURL; +exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; +exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; +exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; +exports.setThePassword = __nccwpck_require__(2158).setThePassword; +exports.serializeHost = __nccwpck_require__(2158).serializeHost; +exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; +exports.parseURL = __nccwpck_require__(2158).parseURL; -/** @type {import('crypto')} */ -let crypto -try { - crypto = __nccwpck_require__(6113) -} catch { -} +/***/ }), -class WebsocketFrameSend { - /** - * @param {Buffer|undefined} data - */ - constructor (data) { - this.frameData = data - this.maskKey = crypto.randomBytes(4) - } +/***/ 2158: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - createFrame (opcode) { - const bodyLength = this.frameData?.byteLength ?? 0 +"use strict"; - /** @type {number} */ - let payloadLength = bodyLength // 0-125 - let offset = 6 +const punycode = __nccwpck_require__(85477); +const tr46 = __nccwpck_require__(84256); - if (bodyLength > maxUnsigned16Bit) { - offset += 8 // payload length is next 8 bytes - payloadLength = 127 - } else if (bodyLength > 125) { - offset += 2 // payload length is next 2 bytes - payloadLength = 126 - } +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; - const buffer = Buffer.allocUnsafe(bodyLength + offset) +const failure = Symbol("failure"); - // Clear first 2 bytes, everything else is overwritten - buffer[0] = buffer[1] = 0 - buffer[0] |= 0x80 // FIN - buffer[0] = (buffer[0] & 0xF0) + opcode // opcode +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} - /*! ws. MIT License. Einar Otto Stangvik */ - buffer[offset - 4] = this.maskKey[0] - buffer[offset - 3] = this.maskKey[1] - buffer[offset - 2] = this.maskKey[2] - buffer[offset - 1] = this.maskKey[3] +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} - buffer[1] = payloadLength +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} - if (payloadLength === 126) { - buffer.writeUInt16BE(bodyLength, 2) - } else if (payloadLength === 127) { - // Clear extended payload length - buffer[2] = buffer[3] = 0 - buffer.writeUIntBE(bodyLength, 4, 6) - } +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} - buffer[1] |= 0x80 // MASK +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} - // mask body - for (let i = 0; i < bodyLength; i++) { - buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] - } +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} - return buffer - } +function isSingleDot(buffer) { + return buffer === "." 
|| buffer.toLowerCase() === "%2e"; } -module.exports = { - WebsocketFrameSend +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; } +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} -/***/ }), +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} -/***/ 1688: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} -"use strict"; +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} -const { Writable } = __nccwpck_require__(2781) -const diagnosticsChannel = __nccwpck_require__(7643) -const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(9188) -const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(7578) -const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(5515) -const { WebsocketFrameSend } = __nccwpck_require__(5444) +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} -// This code was influenced by ws released under the MIT license. -// Copyright (c) 2011 Einar Otto Stangvik -// Copyright (c) 2013 Arnout Kazemier and contributors -// Copyright (c) 2016 Luigi Pinca and contributors +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} -const channels = {} -channels.ping = diagnosticsChannel.channel('undici:websocket:ping') -channels.pong = diagnosticsChannel.channel('undici:websocket:pong') +function defaultPort(scheme) { + return specialSchemes[scheme]; +} -class ByteParser extends Writable { - #buffers = [] - #byteOffset = 0 +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } - #state = parserStates.INFO + return "%" + hex; +} - #info = {} - #fragments = [] +function utf8PercentEncode(c) { + const buf = new Buffer(c); - constructor (ws) { - super() + let str = ""; - this.ws = ws + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); } - /** - * @param {Buffer} chunk - * @param {() => void} callback - */ - _write (chunk, _, callback) { - this.#buffers.push(chunk) - this.#byteOffset += chunk.length + return str; +} - this.run(callback) +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } } + return new Buffer(output).toString(); +} - /** - * Runs whenever a new chunk is received. - * Callback is called whenever there are no more chunks buffering, - * or not enough bytes are buffered to parse. - */ - run (callback) { - while (true) { - if (this.#state === parserStates.INFO) { - // If there aren't enough bytes to parse the payload length, etc. 
- if (this.#byteOffset < 2) { - return callback() - } +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} - const buffer = this.consume(2) +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} - this.#info.fin = (buffer[0] & 0x80) !== 0 - this.#info.opcode = buffer[0] & 0x0F +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} - // If we receive a fragmented message, we use the type of the first - // frame to parse the full message as binary/text, when it's terminated - this.#info.originalOpcode ??= this.#info.opcode +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); - this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } - if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { - // Only text and binary frames can be fragmented - failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') - return - } + return cStr; +} - const payloadLength = buffer[1] & 0x7F +function parseIPv4Number(input) { + let R = 10; - if (payloadLength <= 125) { - this.#info.payloadLength = payloadLength - this.#state = parserStates.READ_DATA - } else if (payloadLength === 126) { - this.#state = parserStates.PAYLOADLENGTH_16 - } else if (payloadLength === 127) { - this.#state = parserStates.PAYLOADLENGTH_64 - } + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } - if (this.#info.fragmented && payloadLength > 125) { - // A fragmented frame can't be fragmented itself - failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') - return - } else if ( - (this.#info.opcode === opcodes.PING || - this.#info.opcode === opcodes.PONG || - this.#info.opcode === opcodes.CLOSE) && - payloadLength > 125 - ) { - // Control frames can have a payload length of 125 bytes MAX - failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') - return - } else if (this.#info.opcode === opcodes.CLOSE) { - if (payloadLength === 1) { - failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') - return - } + if (input === "") { + return 0; + } - const body = this.consume(payloadLength) + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } - this.#info.closeInfo = this.parseCloseBody(false, body) + return parseInt(input, R); +} - if (!this.ws[kSentClose]) { - // If an endpoint receives a Close frame and did not previously send a - // Close frame, the endpoint MUST send a Close frame in response. (When - // sending a Close frame in response, the endpoint typically echos the - // status code it received.) 
- const body = Buffer.allocUnsafe(2) - body.writeUInt16BE(this.#info.closeInfo.code, 0) - const closeFrame = new WebsocketFrameSend(body) +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } - this.ws[kResponse].socket.write( - closeFrame.createFrame(opcodes.CLOSE), - (err) => { - if (!err) { - this.ws[kSentClose] = true - } - } - ) - } + if (parts.length > 4) { + return input; + } - // Upon either sending or receiving a Close control frame, it is said - // that _The WebSocket Closing Handshake is Started_ and that the - // WebSocket connection is in the CLOSING state. - this.ws[kReadyState] = states.CLOSING - this.ws[kReceivedClose] = true + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } - this.end() + numbers.push(n); + } - return - } else if (this.#info.opcode === opcodes.PING) { - // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in - // response, unless it already received a Close frame. - // A Pong frame sent in response to a Ping frame must have identical - // "Application data" + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } - const body = this.consume(payloadLength) + let ipv4 = numbers.pop(); + let counter = 0; - if (!this.ws[kReceivedClose]) { - const frame = new WebsocketFrameSend(body) + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } - this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) + return ipv4; +} - if (channels.ping.hasSubscribers) { - channels.ping.publish({ - payload: body - }) - } - } +function serializeIPv4(address) { + let output = ""; + let n = address; - this.#state = parserStates.INFO + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." + output; + } + n = Math.floor(n / 256); + } - if (this.#byteOffset > 0) { - continue - } else { - callback() - return - } - } else if (this.#info.opcode === opcodes.PONG) { - // A Pong frame MAY be sent unsolicited. This serves as a - // unidirectional heartbeat. A response to an unsolicited Pong frame is - // not expected. 
+ return output; +} - const body = this.consume(payloadLength) +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; - if (channels.pong.hasSubscribers) { - channels.pong.publish({ - payload: body - }) - } + input = punycode.ucs2.decode(input); - if (this.#byteOffset > 0) { - continue - } else { - callback() - return - } - } - } else if (this.#state === parserStates.PAYLOADLENGTH_16) { - if (this.#byteOffset < 2) { - return callback() - } + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } - const buffer = this.consume(2) + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } - this.#info.payloadLength = buffer.readUInt16BE(0) - this.#state = parserStates.READ_DATA - } else if (this.#state === parserStates.PAYLOADLENGTH_64) { - if (this.#byteOffset < 8) { - return callback() - } + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } - const buffer = this.consume(8) - const upper = buffer.readUInt32BE(0) + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } - // 2^31 is the maxinimum bytes an arraybuffer can contain - // on 32-bit systems. Although, on 64-bit systems, this is - // 2^53-1 bytes. - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length - // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 - // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e - if (upper > 2 ** 31 - 1) { - failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') - return - } + let value = 0; + let length = 0; - const lower = buffer.readUInt32BE(4) + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } - this.#info.payloadLength = (upper << 8) + lower - this.#state = parserStates.READ_DATA - } else if (this.#state === parserStates.READ_DATA) { - if (this.#byteOffset < this.#info.payloadLength) { - // If there is still more data in this chunk that needs to be read - return callback() - } else if (this.#byteOffset >= this.#info.payloadLength) { - // If the server sent multiple frames in a single chunk + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } - const body = this.consume(this.#info.payloadLength) + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } - this.#fragments.push(body) + if (!isASCIIDigit(input[pointer])) { + return failure; + } - // If the frame is unfragmented, or a fragmented frame was terminated, - // a message was received - if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { - const fullMessage = Buffer.concat(this.#fragments) + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + 
} - websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - this.#info = {} - this.#fragments.length = 0 - } + ++numbersSeen; - this.#state = parserStates.INFO + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; } } - if (this.#byteOffset > 0) { - continue - } else { - callback() - break + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; } + } else if (input[pointer] !== undefined) { + return failure; } + + address[pieceIndex] = value; + ++pieceIndex; } - /** - * Take n bytes from the buffered Buffers - * @param {number} n - * @returns {Buffer|null} - */ - consume (n) { - if (n > this.#byteOffset) { - return null - } else if (n === 0) { - return emptyBuffer + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } - if (this.#buffers[0].length === n) { - this.#byteOffset -= this.#buffers[0].length - return this.#buffers.shift() - } + return address; +} - const buffer = Buffer.allocUnsafe(n) - let offset = 0 +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; - while (offset !== n) { - const next = this.#buffers[0] - const { length } = next + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } - if (length + offset === n) { - buffer.set(this.#buffers.shift(), offset) - break - } else if (length + offset > n) { - buffer.set(next.subarray(0, n - offset), offset) - this.#buffers[0] = next.subarray(n - offset) - break - } else { - buffer.set(this.#buffers.shift(), offset) - offset += next.length - } + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? 
"::" : ":"; + output += separator; + ignore0 = true; + continue; } - this.#byteOffset -= n + output += address[pieceIndex].toString(16); - return buffer + if (pieceIndex !== 7) { + output += ":"; + } } - parseCloseBody (onlyCode, data) { - // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 - /** @type {number|undefined} */ - let code + return output; +} - if (data.length >= 2) { - // _The WebSocket Connection Close Code_ is - // defined as the status code (Section 7.4) contained in the first Close - // control frame received by the application - code = data.readUInt16BE(0) +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; } - if (onlyCode) { - if (!isValidStatusCode(code)) { - return null - } + return parseIPv6(input.substring(1, input.length - 1)); + } - return { code } - } + if (!isSpecialArg) { + return parseOpaqueHost(input); + } - // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 - /** @type {Buffer} */ - let reason = data.subarray(2) + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } - // Remove BOM - if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { - reason = reason.subarray(3) - } + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } - if (code !== undefined && !isValidStatusCode(code)) { - return null - } + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } - try { - // TODO: optimize this - reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) - } catch { - return null - } + return asciiDomain; +} - return { code, reason } +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; } - get closingInfo () { - return this.#info.closeInfo + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); } + return output; } -module.exports = { - ByteParser -} - - -/***/ }), +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; -/***/ 7578: -/***/ ((module) => { + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } -"use strict"; + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } -module.exports = { - kWebSocketURL: Symbol('url'), - kReadyState: Symbol('ready state'), - kController: Symbol('controller'), - kResponse: Symbol('response'), - kBinaryType: Symbol('binary type'), - kSentClose: Symbol('sent close'), - kReceivedClose: Symbol('received close'), - kByteParser: Symbol('byte parser') + return { + idx: maxIdx, + len: maxLen + }; } +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } -/***/ }), + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } -/***/ 5515: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return host; +} -"use strict"; +function trimControlChars(url) { + return 
url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} -const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(7578) -const { states, opcodes } = __nccwpck_require__(9188) -const { MessageEvent, ErrorEvent } = __nccwpck_require__(2611) +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } -/* globals Blob */ + path.pop(); +} -/** - * @param {import('./websocket').WebSocket} ws - */ -function isEstablished (ws) { - // If the server's response is validated as provided for above, it is - // said that _The WebSocket Connection is Established_ and that the - // WebSocket Connection is in the OPEN state. - return ws[kReadyState] === states.OPEN +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; } -/** - * @param {import('./websocket').WebSocket} ws - */ -function isClosing (ws) { - // Upon either sending or receiving a Close control frame, it is said - // that _The WebSocket Closing Handshake is Started_ and that the - // WebSocket connection is in the CLOSING state. - return ws[kReadyState] === states.CLOSING +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; } -/** - * @param {import('./websocket').WebSocket} ws - */ -function isClosed (ws) { - return ws[kReadyState] === states.CLOSED +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); } -/** - * @see https://dom.spec.whatwg.org/#concept-event-fire - * @param {string} e - * @param {EventTarget} target - * @param {EventInit | undefined} eventInitDict - */ -function fireEvent (e, target, eventConstructor = Event, eventInitDict) { - // 1. If eventConstructor is not given, then let eventConstructor be Event. +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; - // 2. Let event be the result of creating an event given eventConstructor, - // in the relevant realm of target. - // 3. Initialize event’s type attribute to e. - const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + path: [], + query: null, + fragment: null, - // 4. Initialize any other IDL attributes of event as described in the - // invocation of this algorithm. + cannotBeABaseURL: false + }; - // 5. Return the result of dispatching event at target, with legacy target - // override flag set if set. - target.dispatchEvent(event) -} + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } -/** - * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol - * @param {import('./websocket').WebSocket} ws - * @param {number} type Opcode - * @param {Buffer} data application data - */ -function websocketMessageReceived (ws, type, data) { - // 1. If ready state is not OPEN (1), then return. 
- if (ws[kReadyState] !== states.OPEN) { - return + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; } + this.input = res; - // 2. Let dataForEvent be determined by switching on type and binary type: - let dataForEvent + this.state = stateOverride || "scheme start"; - if (type === opcodes.TEXT) { - // -> type indicates that the data is Text - // a new DOMString containing data - try { - dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) - } catch { - failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') - return - } - } else if (type === opcodes.BINARY) { - if (ws[kBinaryType] === 'blob') { - // -> type indicates that the data is Binary and binary type is "blob" - // a new Blob object, created in the relevant Realm of the WebSocket - // object, that represents data as its raw data - dataForEvent = new Blob([data]) - } else { - // -> type indicates that the data is Binary and binary type is "arraybuffer" - // a new ArrayBuffer object, created in the relevant Realm of the - // WebSocket object, whose contents are data - dataForEvent = new Uint8Array(data).buffer + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; } } - - // 3. Fire an event named message at the WebSocket object, using MessageEvent, - // with the origin attribute initialized to the serialization of the WebSocket - // object’s url's origin, and the data attribute initialized to dataForEvent. - fireEvent('message', ws, MessageEvent, { - origin: ws[kWebSocketURL].origin, - data: dataForEvent - }) } -/** - * @see https://datatracker.ietf.org/doc/html/rfc6455 - * @see https://datatracker.ietf.org/doc/html/rfc2616 - * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 - * @param {string} protocol - */ -function isValidSubprotocol (protocol) { - // If present, this value indicates one - // or more comma-separated subprotocol the client wishes to speak, - // ordered by preference. The elements that comprise this value - // MUST be non-empty strings with characters in the range U+0021 to - // U+007E not including separator characters as defined in - // [RFC2616] and MUST all be unique strings. - if (protocol.length === 0) { - return false +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; } - for (const char of protocol) { - const code = char.charCodeAt(0) + return true; +}; - if ( - code < 0x21 || - code > 0x7E || - char === '(' || - char === ')' || - char === '<' || - char === '>' || - char === '@' || - char === ',' || - char === ';' || - char === ':' || - char === '\\' || - char === '"' || - char === '/' || - char === '[' || - char === ']' || - char === '?' 
|| - char === '=' || - char === '{' || - char === '}' || - code === 32 || // SP - code === 9 // HT - ) { - return false - } - } +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } - return true -} + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } -/** - * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 - * @param {number} code - */ -function isValidStatusCode (code) { - if (code >= 1000 && code < 1015) { - return ( - code !== 1004 && // reserved - code !== 1005 && // "MUST NOT be set as a status code" - code !== 1006 // "MUST NOT be set as a status code" - ) + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === "file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; } - return code >= 3000 && code <= 4999 -} + return true; +}; -/** - * @param {import('./websocket').WebSocket} ws - * @param {string|undefined} reason - */ -function failWebsocketConnection (ws, reason) { - const { [kController]: controller, [kResponse]: response } = ws +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } - controller.abort() + return true; +}; - if (response?.socket && !response.socket.destroyed) { - response.socket.destroy() +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; } - if (reason) { - fireEvent('error', ws, ErrorEvent, { - error: new Error(reason) - }) + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = 
"path"; + --this.pointer; } -} -module.exports = { - isEstablished, - isClosing, - isClosed, - fireEvent, - isValidSubprotocol, - isValidStatusCode, - failWebsocketConnection, - websocketMessageReceived -} + return true; +}; +URLStateMachine.prototype["parse relative"] = function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); -/***/ }), + this.state = "path"; + --this.pointer; + } -/***/ 4284: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return true; +}; -"use strict"; +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + return true; +}; -const { webidl } = __nccwpck_require__(1744) -const { DOMException } = __nccwpck_require__(1037) -const { URLSerializer } = __nccwpck_require__(685) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(9188) -const { - kWebSocketURL, - kReadyState, - kController, - kBinaryType, - kResponse, - kSentClose, - kByteParser -} = __nccwpck_require__(7578) -const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(5515) -const { establishWebSocketConnection } = __nccwpck_require__(5354) -const { WebsocketFrameSend } = __nccwpck_require__(5444) -const { ByteParser } = __nccwpck_require__(1688) -const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3983) -const { getGlobalDispatcher } = __nccwpck_require__(1892) -const { types } = __nccwpck_require__(3837) +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } -let experimentalWarned = false + return true; 
+}; -// https://websockets.spec.whatwg.org/#interface-definition -class WebSocket extends EventTarget { - #events = { - open: null, - error: null, - close: null, - message: null +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; } - #bufferedAmount = 0 - #protocol = '' - #extensions = '' + return true; +}; - /** - * @param {string} url - * @param {string|string[]} protocols - */ - constructor (url, protocols = []) { - super() +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; - webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); - if (!experimentalWarned) { - experimentalWarned = true - process.emitWarning('WebSockets are experimental, expect them to change at any time.', { - code: 'UNDICI-WS' - }) + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } - const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) + return true; +}; - url = webidl.converters.USVString(url) - protocols = options.protocols +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + return failure; + } - // 1. Let baseURL be this's relevant settings object's API base URL. - const baseURL = getGlobalOrigin() + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } - // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. - let urlRecord + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } - try { - urlRecord = new URL(url, baseURL) - } catch (e) { - // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. 
- throw new DOMException(e, 'SyntaxError') + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; } - // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". - if (urlRecord.protocol === 'http:') { - urlRecord.protocol = 'ws:' - } else if (urlRecord.protocol === 'https:') { - // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". - urlRecord.protocol = 'wss:' + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; } + this.buffer += cStr; + } - // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. - if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { - throw new DOMException( - `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, - 'SyntaxError' - ) + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } - // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" - // DOMException. - if (urlRecord.hash || urlRecord.href.endsWith('#')) { - throw new DOMException('Got fragment', 'SyntaxError') + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } - // 8. If protocols is a string, set protocols to a sequence consisting - // of just that string. - if (typeof protocols === 'string') { - protocols = [protocols] + this.state = "path"; + --this.pointer; } + } else { + this.state = "path"; + --this.pointer; + } - // 9. 
If any of the values in protocols occur more than once or otherwise - // fail to match the requirements for elements that comprise the value - // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket - // protocol, then throw a "SyntaxError" DOMException. - if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { - throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = this.base.host; + } } + this.state = "path"; + --this.pointer; + } - if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { - throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; } + } else { + this.buffer += cStr; + } - // 10. Set this's url to urlRecord. - this[kWebSocketURL] = new URL(urlRecord.href) + return true; +}; - // 11. Let client be this's relevant settings object. +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; - // 12. Run this step in parallel: + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } - // 1. Establish a WebSocket connection given urlRecord, protocols, - // and client. - this[kController] = establishWebSocketConnection( - urlRecord, - protocols, - this, - (response) => this.#onConnectionEstablished(response), - options - ) + return true; +}; - // Each WebSocket object has an associated ready state, which is a - // number representing the state of the connection. Initially it must - // be CONNECTING (0). - this[kReadyState] = WebSocket.CONNECTING +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + (!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } - // The extensions attribute must initially return the empty string. 
+ if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. - // The protocol attribute must initially return the empty string. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } - // Each WebSocket object has an associated binary type, which is a - // BinaryType. Initially it must be "blob". - this[kBinaryType] = 'blob' + this.buffer += percentEncodeChar(c, isPathPercentEncode); } - /** - * @see https://websockets.spec.whatwg.org/#dom-websocket-close - * @param {number|undefined} code - * @param {string|undefined} reason - */ - close (code = undefined, reason = undefined) { - webidl.brandCheck(this, WebSocket) + return true; +}; - if (code !== undefined) { - code = webidl.converters['unsigned short'](code, { clamp: true }) +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; } - if (reason !== undefined) { - reason = webidl.converters.USVString(reason) + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; } - // 1. If code is present, but is neither an integer equal to 1000 nor an - // integer in the range 3000 to 4999, inclusive, throw an - // "InvalidAccessError" DOMException. - if (code !== undefined) { - if (code !== 1000 && (code < 3000 || code > 4999)) { - throw new DOMException('invalid code', 'InvalidAccessError') - } + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); } + } - let reasonByteLength = 0 - - // 2. If reason is present, then run these substeps: - if (reason !== undefined) { - // 1. Let reasonBytes be the result of encoding reason. - // 2. If reasonBytes is longer than 123 bytes, then throw a - // "SyntaxError" DOMException. 
- reasonByteLength = Buffer.byteLength(reason) + return true; +}; - if (reasonByteLength > 123) { - throw new DOMException( - `Reason must be less than 123 bytes; received ${reasonByteLength}`, - 'SyntaxError' - ) - } +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; } - // 3. Run the first matching steps from the following list: - if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { - // If this's ready state is CLOSING (2) or CLOSED (3) - // Do nothing. - } else if (!isEstablished(this)) { - // If the WebSocket connection is not yet established - // Fail the WebSocket connection and set this's ready state - // to CLOSING (2). - failWebsocketConnection(this, 'Connection was closed before it was established.') - this[kReadyState] = WebSocket.CLOSING - } else if (!isClosing(this)) { - // If the WebSocket closing handshake has not yet been started - // Start the WebSocket closing handshake and set this's ready - // state to CLOSING (2). - // - If neither code nor reason is present, the WebSocket Close - // message must not have a body. - // - If code is present, then the status code to use in the - // WebSocket Close message must be the integer given by code. - // - If reason is also present, then reasonBytes must be - // provided in the Close message after the status code. - - const frame = new WebsocketFrameSend() - - // If neither code nor reason is present, the WebSocket Close - // message must not have a body. - - // If code is present, then the status code to use in the - // WebSocket Close message must be the integer given by code. - if (code !== undefined && reason === undefined) { - frame.frameData = Buffer.allocUnsafe(2) - frame.frameData.writeUInt16BE(code, 0) - } else if (code !== undefined && reason !== undefined) { - // If reason is also present, then reasonBytes must be - // provided in the Close message after the status code. - frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) - frame.frameData.writeUInt16BE(code, 0) - // the body MAY contain UTF-8-encoded data with value /reason/ - frame.frameData.write(reason, 2, 'utf-8') + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); } else { - frame.frameData = emptyBuffer + this.url.query += String.fromCodePoint(buffer[i]); } - - /** @type {import('stream').Duplex} */ - const socket = this[kResponse].socket - - socket.write(frame.createFrame(opcodes.CLOSE), (err) => { - if (!err) { - this[kSentClose] = true - } - }) - - // Upon either sending or receiving a Close control frame, it is said - // that _The WebSocket Closing Handshake is Started_ and that the - // WebSocket connection is in the CLOSING state. - this[kReadyState] = states.CLOSING - } else { - // Otherwise - // Set this's ready state to CLOSING (2). 
- this[kReadyState] = WebSocket.CLOSING } - } - - /** - * @see https://websockets.spec.whatwg.org/#dom-websocket-send - * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data - */ - send (data) { - webidl.brandCheck(this, WebSocket) - - webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) - - data = webidl.converters.WebSocketSendData(data) - // 1. If this's ready state is CONNECTING, then throw an - // "InvalidStateError" DOMException. - if (this[kReadyState] === WebSocket.CONNECTING) { - throw new DOMException('Sent before connected.', 'InvalidStateError') + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; } - - // 2. Run the appropriate set of steps from the following list: - // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 - // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - - if (!isEstablished(this) || isClosing(this)) { - return + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; } - /** @type {import('stream').Duplex} */ - const socket = this[kResponse].socket - - // If data is a string - if (typeof data === 'string') { - // If the WebSocket connection is established and the WebSocket - // closing handshake has not yet started, then the user agent - // must send a WebSocket Message comprised of the data argument - // using a text frame opcode; if the data cannot be sent, e.g. - // because it would need to be buffered but the buffer is full, - // the user agent must flag the WebSocket as full and then close - // the WebSocket connection. Any invocation of this method with a - // string argument that does not throw an exception must increase - // the bufferedAmount attribute by the number of bytes needed to - // express the argument as UTF-8. - - const value = Buffer.from(data) - const frame = new WebsocketFrameSend(value) - const buffer = frame.createFrame(opcodes.TEXT) - - this.#bufferedAmount += value.byteLength - socket.write(buffer, () => { - this.#bufferedAmount -= value.byteLength - }) - } else if (types.isArrayBuffer(data)) { - // If the WebSocket connection is established, and the WebSocket - // closing handshake has not yet started, then the user agent must - // send a WebSocket Message comprised of data using a binary frame - // opcode; if the data cannot be sent, e.g. because it would need - // to be buffered but the buffer is full, the user agent must flag - // the WebSocket as full and then close the WebSocket connection. - // The data to be sent is the data stored in the buffer described - // by the ArrayBuffer object. Any invocation of this method with an - // ArrayBuffer argument that does not throw an exception must - // increase the bufferedAmount attribute by the length of the - // ArrayBuffer in bytes. + this.buffer += cStr; + } - const value = Buffer.from(data) - const frame = new WebsocketFrameSend(value) - const buffer = frame.createFrame(opcodes.BINARY) + return true; +}; - this.#bufferedAmount += value.byteLength - socket.write(buffer, () => { - this.#bufferedAmount -= value.byteLength - }) - } else if (ArrayBuffer.isView(data)) { - // If the WebSocket connection is established, and the WebSocket - // closing handshake has not yet started, then the user agent must - // send a WebSocket Message comprised of data using a binary frame - // opcode; if the data cannot be sent, e.g. 
because it would need to - // be buffered but the buffer is full, the user agent must flag the - // WebSocket as full and then close the WebSocket connection. The - // data to be sent is the data stored in the section of the buffer - // described by the ArrayBuffer object that data references. Any - // invocation of this method with this kind of argument that does - // not throw an exception must increase the bufferedAmount attribute - // by the length of data’s buffer in bytes. +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } - const ab = Buffer.from(data, data.byteOffset, data.byteLength) + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } - const frame = new WebsocketFrameSend(ab) - const buffer = frame.createFrame(opcodes.BINARY) + return true; +}; - this.#bufferedAmount += ab.byteLength - socket.write(buffer, () => { - this.#bufferedAmount -= ab.byteLength - }) - } else if (isBlobLike(data)) { - // If the WebSocket connection is established, and the WebSocket - // closing handshake has not yet started, then the user agent must - // send a WebSocket Message comprised of data using a binary frame - // opcode; if the data cannot be sent, e.g. because it would need to - // be buffered but the buffer is full, the user agent must flag the - // WebSocket as full and then close the WebSocket connection. The data - // to be sent is the raw data represented by the Blob object. Any - // invocation of this method with a Blob argument that does not throw - // an exception must increase the bufferedAmount attribute by the size - // of the Blob object’s raw data, in bytes. +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; - const frame = new WebsocketFrameSend() + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } - data.arrayBuffer().then((ab) => { - const value = Buffer.from(ab) - frame.frameData = value - const buffer = frame.createFrame(opcodes.BINARY) + output += serializeHost(url.host); - this.#bufferedAmount += value.byteLength - socket.write(buffer, () => { - this.#bufferedAmount -= value.byteLength - }) - }) + if (url.port !== null) { + output += ":" + url.port; } + } else if (url.host === null && url.scheme === "file") { + output += "//"; } - get readyState () { - webidl.brandCheck(this, WebSocket) - - // The readyState getter steps are to return this's ready state. - return this[kReadyState] + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } } - get bufferedAmount () { - webidl.brandCheck(this, WebSocket) - - return this.#bufferedAmount + if (url.query !== null) { + output += "?" + url.query; } - get url () { - webidl.brandCheck(this, WebSocket) - - // The url getter steps are to return this's url, serialized. 
- return URLSerializer(this[kWebSocketURL]) + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; } - get extensions () { - webidl.brandCheck(this, WebSocket) - - return this.#extensions - } + return output; +} - get protocol () { - webidl.brandCheck(this, WebSocket) +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); - return this.#protocol + if (tuple.port !== null) { + result += ":" + tuple.port; } - get onopen () { - webidl.brandCheck(this, WebSocket) + return result; +} - return this.#events.open - } +module.exports.serializeURL = serializeURL; - set onopen (fn) { - webidl.brandCheck(this, WebSocket) +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; - if (this.#events.open) { - this.removeEventListener('open', this.#events.open) - } +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } - if (typeof fn === 'function') { - this.#events.open = fn - this.addEventListener('open', fn) - } else { - this.#events.open = null - } + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; } - get onerror () { - webidl.brandCheck(this, WebSocket) + return usm.url; +}; - return this.#events.error +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); } +}; - set onerror (fn) { - webidl.brandCheck(this, WebSocket) +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; - if (this.#events.error) { - this.removeEventListener('error', this.#events.error) - } +module.exports.serializeHost = serializeHost; - if (typeof fn === 'function') { - this.#events.error = fn - this.addEventListener('error', fn) - } else { - this.#events.error = null - } - } +module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; - get onclose () { - webidl.brandCheck(this, WebSocket) +module.exports.serializeInteger = function (integer) { + return String(integer); +}; - return this.#events.close +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; } - set onclose (fn) { - webidl.brandCheck(this, WebSocket) - - if (this.#events.close) { - this.removeEventListener('close', this.#events.close) - } + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; - if (typeof fn === 'function') { 
- this.#events.close = fn - this.addEventListener('close', fn) - } else { - this.#events.close = null - } - } - get onmessage () { - webidl.brandCheck(this, WebSocket) +/***/ }), - return this.#events.message - } +/***/ 83185: +/***/ ((module) => { - set onmessage (fn) { - webidl.brandCheck(this, WebSocket) +"use strict"; - if (this.#events.message) { - this.removeEventListener('message', this.#events.message) - } - if (typeof fn === 'function') { - this.#events.message = fn - this.addEventListener('message', fn) - } else { - this.#events.message = null - } +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); } +}; - get binaryType () { - webidl.brandCheck(this, WebSocket) +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); - return this[kBinaryType] - } +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; - set binaryType (type) { - webidl.brandCheck(this, WebSocket) +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; - if (type !== 'blob' && type !== 'arraybuffer') { - this[kBinaryType] = 'blob' - } else { - this[kBinaryType] = type - } - } - /** - * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol - */ - #onConnectionEstablished (response) { - // processResponse is called when the "response’s header list has been received and initialized." - // once this happens, the connection is open - this[kResponse] = response - const parser = new ByteParser(this) - parser.on('drain', function onParserDrain () { - this.ws[kResponse].socket.resume() - }) +/***/ }), - response.socket.ws = this - this[kByteParser] = parser +/***/ 62940: +/***/ ((module) => { - // 1. Change the ready state to OPEN (1). - this[kReadyState] = states.OPEN +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) - // 2. Change the extensions attribute’s value to the extensions in use, if - // it is not the null value. - // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 - const extensions = response.headersList.get('sec-websocket-extensions') + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') - if (extensions !== null) { - this.#extensions = extensions - } + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) - // 3. Change the protocol attribute’s value to the subprotocol in use, if - // it is not the null value. - // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 - const protocol = response.headersList.get('sec-websocket-protocol') + return wrapper - if (protocol !== null) { - this.#protocol = protocol + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] } - - // 4. Fire an event named open at the WebSocket object. 
- fireEvent('open', this) + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret } } -// https://websockets.spec.whatwg.org/#dom-websocket-connecting -WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING -// https://websockets.spec.whatwg.org/#dom-websocket-open -WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN -// https://websockets.spec.whatwg.org/#dom-websocket-closing -WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING -// https://websockets.spec.whatwg.org/#dom-websocket-closed -WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED -Object.defineProperties(WebSocket.prototype, { - CONNECTING: staticPropertyDescriptors, - OPEN: staticPropertyDescriptors, - CLOSING: staticPropertyDescriptors, - CLOSED: staticPropertyDescriptors, - url: kEnumerableProperty, - readyState: kEnumerableProperty, - bufferedAmount: kEnumerableProperty, - onopen: kEnumerableProperty, - onerror: kEnumerableProperty, - onclose: kEnumerableProperty, - close: kEnumerableProperty, - onmessage: kEnumerableProperty, - binaryType: kEnumerableProperty, - send: kEnumerableProperty, - extensions: kEnumerableProperty, - protocol: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'WebSocket', - writable: false, - enumerable: false, - configurable: true - } -}) +/***/ }), -Object.defineProperties(WebSocket, { - CONNECTING: staticPropertyDescriptors, - OPEN: staticPropertyDescriptors, - CLOSING: staticPropertyDescriptors, - CLOSED: staticPropertyDescriptors -}) +/***/ 15185: +/***/ ((module) => { -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.DOMString -) +class Node { + /// value; + /// next; -webidl.converters['DOMString or sequence'] = function (V) { - if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { - return webidl.converters['sequence'](V) - } + constructor(value) { + this.value = value; - return webidl.converters.DOMString(V) + // TODO: Remove this when targeting Node.js 12. + this.next = undefined; + } } -// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 -webidl.converters.WebSocketInit = webidl.dictionaryConverter([ - { - key: 'protocols', - converter: webidl.converters['DOMString or sequence'], - get defaultValue () { - return [] - } - }, - { - key: 'dispatcher', - converter: (V) => V, - get defaultValue () { - return getGlobalDispatcher() - } - }, - { - key: 'headers', - converter: webidl.nullableConverter(webidl.converters.HeadersInit) - } -]) - -webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { - if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { - return webidl.converters.WebSocketInit(V) - } +class Queue { + // TODO: Use private class fields when targeting Node.js 12. 
+ // #_head; + // #_tail; + // #_size; - return { protocols: webidl.converters['DOMString or sequence'](V) } -} + constructor() { + this.clear(); + } -webidl.converters.WebSocketSendData = function (V) { - if (webidl.util.Type(V) === 'Object') { - if (isBlobLike(V)) { - return webidl.converters.Blob(V, { strict: false }) - } + enqueue(value) { + const node = new Node(value); - if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { - return webidl.converters.BufferSource(V) - } - } + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } - return webidl.converters.USVString(V) -} + this._size++; + } -module.exports = { - WebSocket -} + dequeue() { + const current = this._head; + if (!current) { + return; + } + this._head = this._head.next; + this._size--; + return current.value; + } -/***/ }), + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } -/***/ 7127: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + get size() { + return this._size; + } + * [Symbol.iterator]() { + let current = this._head; -/** - * For Node.js, simply re-export the core `util.deprecate` function. - */ + while (current) { + yield current.value; + current = current.next; + } + } +} -module.exports = __nccwpck_require__(3837).deprecate; +module.exports = Queue; /***/ }), -/***/ 5840: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 14553: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; })); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const cache = __importStar(__nccwpck_require__(27799)); +const constants_1 = __nccwpck_require__(69042); +const cache_utils_1 = __nccwpck_require__(41678); +// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in +// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to +// throw an uncaught exception. Instead of failing this action, just warn. +process.on('uncaughtException', e => { + const warningPrefix = '[warning]'; + core.info(`${warningPrefix}${e.message}`); +}); +// Added early exit to resolve issue with slow post action step: +function run(earlyExit) { + return __awaiter(this, void 0, void 0, function* () { + try { + const cacheLock = core.getState(constants_1.State.CachePackageManager); + if (cacheLock) { + yield cachePackages(cacheLock); + if (earlyExit) { + process.exit(0); + } + } + else { + core.debug(`Caching for '${cacheLock}' is not supported`); + } + } + catch (error) { + core.setFailed(error.message); + } + }); +} +exports.run = run; +const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const state = core.getState(constants_1.State.CacheMatchedKey); + const primaryKey = core.getState(constants_1.State.CachePrimaryKey); + const cachePaths = JSON.parse(core.getState(constants_1.State.CachePaths) || '[]'); + const packageManagerInfo = yield (0, cache_utils_1.getPackageManagerInfo)(packageManager); + if (!packageManagerInfo) { + core.debug(`Caching for '${packageManager}' is not supported`); + return; + } + if (!cachePaths.length) { + // TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?) 
+ // export declare function getInput(name: string, options?: InputOptions): string; + const cacheDependencyPath = core.getInput('cache-dependency-path') || ''; + throw new Error(`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`); + } + if (primaryKey === state) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = yield cache.saveCache(cachePaths, primaryKey); + if (cacheId == '') { + return; + } + core.info(`Cache saved with the key: ${primaryKey}`); +}); +run(true); -var _v = _interopRequireDefault(__nccwpck_require__(8628)); -var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); +/***/ }), + +/***/ 41678: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); +"use strict"; -var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const exec = __importStar(__nccwpck_require__(71514)); +const cache = __importStar(__nccwpck_require__(27799)); +const glob = __importStar(__nccwpck_require__(28090)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const util_1 = __nccwpck_require__(92629); +exports.supportedPackageManagers = { + npm: { + name: 'npm', + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('npm config get cache', 'Could not get npm cache folder path') + }, + pnpm: { + name: 'pnpm', + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('pnpm store path --silent', 'Could not get pnpm cache folder path') + }, + yarn: { + name: 'yarn', + lockFilePatterns: ['yarn.lock'], + getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { + const yarnVersion = yield (0, exports.getCommandOutputNotEmpty)(`yarn --version`, 'Could not retrieve version of yarn', projectDir); + core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); + const stdOut = yarnVersion.startsWith('1.') + ? yield (0, exports.getCommandOutput)('yarn cache dir', projectDir) + : yield (0, exports.getCommandOutput)('yarn config get cacheFolder', projectDir); + if (!stdOut) { + throw new Error(`Could not get yarn cache folder path for ${projectDir}`); + } + return stdOut; + }) + } +}; +const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); + if (exitCode) { + stderr = !stderr.trim() + ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +exports.getCommandOutput = getCommandOutput; +const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = (0, exports.getCommandOutput)(toolCommand, cwd); + if (!stdOut) { + throw new Error(error); + } + return stdOut; +}); +exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; +const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + return exports.supportedPackageManagers.yarn; + } + else { + return null; + } +}); +exports.getPackageManagerInfo = getPackageManagerInfo; +/** + * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` + * - first through `getCacheDirectories` + * - second from `repoHasYarn3ManagedCache` + * + * it contains expensive IO operation and thus should be memoized + */ +let projectDirectoriesMemoized = null; +/** + * unit test must reset memoized variables + */ +const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); +exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +/** + * Expands (converts) the string input `cache-dependency-path` to list of directories that + * may be project roots + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of directories and possible + */ +const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (projectDirectoriesMemoized !== null) { + return projectDirectoriesMemoized; + } + const globber = yield glob.create(cacheDependencyPath); + const cacheDependenciesPaths = yield globber.glob(); + const existingDirectories = cacheDependenciesPaths + .map(path_1.default.dirname) + .filter((0, util_1.unique)()) + .map(dirName => fs_1.default.realpathSync(dirName)) + .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); + if (!existingDirectories.length) + core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); + projectDirectoriesMemoized = existingDirectories; + return existingDirectories; +}); +/** + * Finds the cache directories configured for the repo if cache-dependency-path is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); + const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); + 
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); + return cacheFolderPath; + }))); + // uniq in order to do not cache the same directories twice + return cacheFoldersPaths.filter((0, util_1.unique)()); +}); +/** + * Finds the cache directories configured for the repo ignoring cache-dependency-path + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @return list of files on which the cache depends + */ +const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); + core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); + return [cacheFolderPath]; +}); +/** + * A function to find the cache directories configured for the repo + * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project + // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 + if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { + return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); + } + return getCacheDirectoriesForRootProject(packageManagerInfo); +}); +exports.getCacheDirectories = getCacheDirectories; +/** + * A function to check if the directory is a yarn project configured to manage + * obsolete dependencies in the local cache + * @param directory - a path to the folder + * @return - true if the directory's project is yarn managed + * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false + * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false + * - if local cache is not explicitly enabled (not yarn3), return false + * - return true otherwise + */ +const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { + const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; + core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); + // if .yarn/cache directory exists the cache is managed by version control system + const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); + if (fs_1.default.existsSync(yarnCacheFile) && + fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { + core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); + return Promise.resolve(false); + } + // NOTE: yarn1 returns 'undefined' with return code = 0 + const enableGlobalCache = yield (0, exports.getCommandOutput)('yarn config get enableGlobalCache', workDir); + // only local cache is not managed by yarn + const managed = enableGlobalCache.includes('false'); + if (managed) { + core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); + return true; + } + else { + 
core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`); + return false; + } +}); +/** + * A function to report the repo contains Yarn managed projects + * @param packageManagerInfo - used to make sure current package manager is yarn + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return - true if all project directories configured to be Yarn managed + */ +const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManagerInfo.name !== 'yarn') + return false; + const yarnDirs = cacheDependencyPath + ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) + : ['']; + const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); + return isManagedList.every(Boolean); +}); +exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === 'GITHUB.COM'; + const isGitHubEnterpriseCloudHost = hostname.endsWith('.GHE.COM'); + const isLocalHost = hostname.endsWith('.LOCALHOST'); + return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (cache.isFeatureAvailable()) + return true; + if (isGhes()) { + core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + return false; + } + core.warning('The runner was not able to contact the cache service. Caching will be skipped'); + return false; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; -var _nil = _interopRequireDefault(__nccwpck_require__(5332)); -var _version = _interopRequireDefault(__nccwpck_require__(2414)); +/***/ }), -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); +/***/ 69042: +/***/ ((__unused_webpack_module, exports) => { -var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); +"use strict"; -var _parse = _interopRequireDefault(__nccwpck_require__(2746)); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Outputs = exports.State = exports.LockType = void 0; +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType || (exports.LockType = LockType = {})); +var State; +(function (State) { + State["CachePackageManager"] = "SETUP_NODE_CACHE_PACKAGE_MANAGER"; + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; + State["CachePaths"] = "CACHE_PATHS"; +})(State || (exports.State = State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs || (exports.Outputs = Outputs = {})); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 4569: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 92629: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ - value: true +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; })); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unique = exports.printEnvDetailsAndSetOutput = exports.getNodeVersionFromFile = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const exec = __importStar(__nccwpck_require__(71514)); +const io = __importStar(__nccwpck_require__(47351)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +function getNodeVersionFromFile(versionFilePath) { + var _a, _b, _c, _d, _e; + if (!fs_1.default.existsSync(versionFilePath)) { + throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); + } + const contents = fs_1.default.readFileSync(versionFilePath, 'utf8'); + // Try parsing the file as an NPM `package.json` file. + try { + const manifest = JSON.parse(contents); + // Presume package.json file. + if (typeof manifest === 'object' && !!manifest) { + // Support Volta. + // See https://docs.volta.sh/guide/understanding#managing-your-project + if ((_a = manifest.volta) === null || _a === void 0 ? void 0 : _a.node) { + return manifest.volta.node; + } + if ((_b = manifest.engines) === null || _b === void 0 ? void 0 : _b.node) { + return manifest.engines.node; + } + // Support Volta workspaces. + // See https://docs.volta.sh/advanced/workspaces + if ((_c = manifest.volta) === null || _c === void 0 ? 
void 0 : _c.extends) { + const extendedFilePath = path_1.default.resolve(path_1.default.dirname(versionFilePath), manifest.volta.extends); + core.info('Resolving node version from ' + extendedFilePath); + return getNodeVersionFromFile(extendedFilePath); + } + // If contents are an object, we parsed JSON + // this can happen if node-version-file is a package.json + // yet contains no volta.node or engines.node + // + // If node-version file is _not_ JSON, control flow + // will not have reached these lines. + // + // And because we've reached here, we know the contents + // *are* JSON, so no further string parsing makes sense. + return null; + } + } + catch (_f) { + core.info('Node version file is not JSON file'); + } + const found = contents.match(/^(?:node(js)?\s+)?v?(?[^\s]+)$/m); + return (_e = (_d = found === null || found === void 0 ? void 0 : found.groups) === null || _d === void 0 ? void 0 : _d.version) !== null && _e !== void 0 ? _e : contents.trim(); +} +exports.getNodeVersionFromFile = getNodeVersionFromFile; +function printEnvDetailsAndSetOutput() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Environment details'); + const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { + const pathTool = yield io.which(tool, false); + const output = pathTool ? yield getToolVersion(tool, ['--version']) : ''; + return { tool, output }; + })); + const tools = yield Promise.all(promises); + tools.forEach(({ tool, output }) => { + if (tool === 'node') { + core.setOutput(`${tool}-version`, output); + } + core.info(`${tool}: ${output}`); + }); + core.endGroup(); + }); } +exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; +function getToolVersion(tool, options) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { + ignoreReturnCode: true, + silent: true + }); + if (exitCode > 0) { + core.info(`[warning]${stderr}`); + return ''; + } + return stdout.trim(); + } + catch (err) { + return ''; + } + }); +} +const unique = () => { + const encountered = new Set(); + return (value) => { + if (encountered.has(value)) + return false; + encountered.add(value); + return true; + }; +}; +exports.unique = unique; -var _default = md5; -exports["default"] = _default; /***/ }), -/***/ 2054: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +/***/ 22877: +/***/ ((module) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +module.exports = eval("require")("encoding"); -var _default = { - randomUUID: _crypto.default.randomUUID -}; -exports["default"] = _default; /***/ }), -/***/ 5332: -/***/ ((__unused_webpack_module, exports) => { +/***/ 39491: +/***/ ((module) => { "use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; +module.exports = require("assert"); /***/ }), -/***/ 2746: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 50852: +/***/ ((module) => { "use strict"; +module.exports = require("async_hooks"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ 14300: +/***/ ((module) => { -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); +"use strict"; +module.exports = require("buffer"); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ }), -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +/***/ 32081: +/***/ ((module) => { - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ +"use strict"; +module.exports = require("child_process"); - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ +/***/ }), - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ +/***/ 96206: +/***/ ((module) => { - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ +"use strict"; +module.exports = require("console"); - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) +/***/ }), - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} +/***/ 6113: +/***/ ((module) => { -var _default = parse; -exports["default"] = _default; +"use strict"; +module.exports = require("crypto"); /***/ }), -/***/ 814: -/***/ ((__unused_webpack_module, exports) => { +/***/ 67643: +/***/ ((module) => { "use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; +module.exports = require("diagnostics_channel"); /***/ }), -/***/ 807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 82361: +/***/ ((module) => { "use strict"; +module.exports = require("events"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; +/***/ 57147: +/***/ ((module) => { -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +"use strict"; +module.exports = require("fs"); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +/***/ }), -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate +/***/ 13685: +/***/ ((module) => { -let poolPtr = rnds8Pool.length; +"use strict"; +module.exports = require("http"); -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +/***/ }), - poolPtr = 0; - } +/***/ 85158: +/***/ ((module) => { - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} +"use strict"; +module.exports = require("http2"); /***/ }), -/***/ 5274: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 95687: +/***/ ((module) => { "use strict"; +module.exports = require("https"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +/***/ 41808: +/***/ ((module) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +"use strict"; +module.exports = require("net"); -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } +/***/ }), - return _crypto.default.createHash('sha1').update(bytes).digest(); -} +/***/ 15673: +/***/ ((module) => { -var _default = sha1; -exports["default"] = _default; +"use strict"; +module.exports = require("node:events"); /***/ }), -/***/ 8950: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 88849: +/***/ ((module) => { "use strict"; +module.exports = require("node:http"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -exports.unsafeStringify = unsafeStringify; - -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ 22286: +/***/ ((module) => { -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +"use strict"; +module.exports = require("node:https"); -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).slice(1)); -} +/***/ }), -function unsafeStringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; -} +/***/ 70612: +/***/ ((module) => { -function stringify(arr, offset = 0) { - const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +"use strict"; +module.exports = require("node:os"); - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } +/***/ }), - return uuid; -} +/***/ 97742: +/***/ ((module) => { -var _default = stringify; -exports["default"] = _default; +"use strict"; +module.exports = require("node:process"); /***/ }), -/***/ 8628: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 84492: +/***/ ((module) => { "use strict"; +module.exports = require("node:stream"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ 47261: +/***/ ((module) => { -var _rng = _interopRequireDefault(__nccwpck_require__(807)); +"use strict"; +module.exports = require("node:util"); -var _stringify = __nccwpck_require__(8950); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ 65628: +/***/ ((module) => { -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; +"use strict"; +module.exports = require("node:zlib"); -let _clockseq; // Previous uuid creation time +/***/ }), +/***/ 22037: +/***/ ((module) => { -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details +"use strict"; +module.exports = require("os"); -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 +/***/ }), - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); +/***/ 71017: +/***/ ((module) => { - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } +"use strict"; +module.exports = require("path"); - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. +/***/ }), +/***/ 4074: +/***/ ((module) => { - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock +"use strict"; +module.exports = require("perf_hooks"); - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) +/***/ }), - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression +/***/ 85477: +/***/ ((module) => { - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval +"use strict"; +module.exports = require("punycode"); +/***/ }), - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested +/***/ 63477: +/***/ ((module) => { +"use strict"; +module.exports = require("querystring"); - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } +/***/ }), - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch +/***/ 12781: +/***/ ((module) => { - msecs += 12219292800000; // `time_low` +"use strict"; +module.exports = require("stream"); - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` +/***/ }), - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` +/***/ 35356: +/***/ ((module) => { - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version +"use strict"; +module.exports = require("stream/web"); - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) +/***/ }), - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` +/***/ 71576: +/***/ ((module) => { - b[i++] = clockseq & 0xff; // `node` +"use strict"; +module.exports = require("string_decoder"); - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } +/***/ }), - return buf || (0, _stringify.unsafeStringify)(b); -} +/***/ 39512: +/***/ ((module) => { -var _default = v1; -exports["default"] = _default; +"use strict"; +module.exports = require("timers"); /***/ }), -/***/ 6409: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 24404: +/***/ ((module) => { "use strict"; +module.exports = require("tls"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ 76224: +/***/ ((module) => { -var _v = _interopRequireDefault(__nccwpck_require__(5998)); +"use strict"; +module.exports = require("tty"); -var _md = _interopRequireDefault(__nccwpck_require__(4569)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ 57310: +/***/ ((module) => { -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; +"use strict"; +module.exports = require("url"); /***/ }), -/***/ 5998: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 73837: +/***/ ((module) => { "use strict"; +module.exports = require("util"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports.URL = exports.DNS = void 0; -exports["default"] = v35; +/***/ 29830: +/***/ ((module) => { -var _stringify = __nccwpck_require__(8950); +"use strict"; +module.exports = require("util/types"); -var _parse = _interopRequireDefault(__nccwpck_require__(2746)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +/***/ 71267: +/***/ ((module) => { -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape +"use strict"; +module.exports = require("worker_threads"); - const bytes = []; +/***/ }), - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } +/***/ 59796: +/***/ ((module) => { - return bytes; -} +"use strict"; +module.exports = require("zlib"); -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; +/***/ }), -function v35(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - var _namespace; +/***/ 31875: +/***/ ((__unused_webpack_module, exports) => { - if (typeof value === 'string') { - value = stringToBytes(value); - } +"use strict"; - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureKeyCredential = void 0; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +exports.AzureKeyCredential = AzureKeyCredential; +//# sourceMappingURL=azureKeyCredential.js.map - if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; +/***/ }), - if (buf) { - offset = offset || 0; +/***/ 51377: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } +"use strict"; - return buf; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureNamedKeyCredential = void 0; +exports.isNamedKeyCredential = isNamedKeyCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. 
+ * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map - return (0, _stringify.unsafeStringify)(bytes); - } // Function#name is not settable on some platforms (#270) - +/***/ }), - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support +/***/ 27182: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureSASCredential = void 0; +exports.isSASCredential = isSASCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +exports.AzureSASCredential = AzureSASCredential; +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +function isSASCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"); } +//# sourceMappingURL=azureSASCredential.js.map /***/ }), -/***/ 5122: +/***/ 98834: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0; +var azureKeyCredential_js_1 = __nccwpck_require__(31875); +Object.defineProperty(exports, "AzureKeyCredential", ({ enumerable: true, get: function () { return azureKeyCredential_js_1.AzureKeyCredential; } })); +var keyCredential_js_1 = __nccwpck_require__(59122); +Object.defineProperty(exports, "isKeyCredential", ({ enumerable: true, get: function () { return keyCredential_js_1.isKeyCredential; } })); +var azureNamedKeyCredential_js_1 = __nccwpck_require__(51377); +Object.defineProperty(exports, "AzureNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } })); +Object.defineProperty(exports, "isNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } })); +var azureSASCredential_js_1 = __nccwpck_require__(27182); +Object.defineProperty(exports, "AzureSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.AzureSASCredential; } })); +Object.defineProperty(exports, "isSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.isSASCredential; } })); +var tokenCredential_js_1 = __nccwpck_require__(39162); +Object.defineProperty(exports, "isTokenCredential", ({ enumerable: true, get: function () { return tokenCredential_js_1.isTokenCredential; } })); +//# sourceMappingURL=index.js.map -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ }), -var _native = _interopRequireDefault(__nccwpck_require__(2054)); +/***/ 59122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var _rng = _interopRequireDefault(__nccwpck_require__(807)); +"use strict"; -var _stringify = __nccwpck_require__(8950); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isKeyCredential = isKeyCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +function isKeyCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; +} +//# sourceMappingURL=keyCredential.js.map -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ }), -function v4(options, buf, offset) { - if (_native.default.randomUUID && !buf && !options) { - return _native.default.randomUUID(); - } +/***/ 39162: +/***/ ((__unused_webpack_module, exports) => { - options = options || {}; +"use strict"; - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
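// For reference, a minimal usage sketch of the bundled credential helpers in the hunks
// above; it is not taken from this diff, and it assumes the modules are consumed under
// their usual "@azure/core-auth" package name.
const { AzureKeyCredential, isKeyCredential, isSASCredential } = require("@azure/core-auth");

const cred = new AzureKeyCredential("initial-key"); // the constructor throws on an empty key
cred.update("rotated-key");                         // the new key is used on subsequent requests
console.log(cred.key);                              // "rotated-key"

// The type guards only check for a string-valued `key` / `signature` property:
console.log(isKeyCredential({ key: "abc" }));              // true
console.log(isSASCredential({ signature: "sv=2021-06" })); // true
console.log(isSASCredential(cred));                        // false: AzureKeyCredential has no `signature`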
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isBearerToken = isBearerToken; +exports.isPopToken = isPopToken; +exports.isTokenCredential = isTokenCredential; +/** + * @internal + * @param accessToken - Access token + * @returns Whether a token is bearer type or not + */ +function isBearerToken(accessToken) { + return !accessToken.tokenType || accessToken.tokenType === "Bearer"; +} +/** + * @internal + * @param accessToken - Access token + * @returns Whether a token is Pop token or not + */ +function isPopToken(accessToken) { + return accessToken.tokenType === "pop"; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map +/***/ }), - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +/***/ 94873: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (buf) { - offset = offset || 0; +"use strict"; - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnClaimChallenge = exports.parseCAEChallenge = void 0; +const log_js_1 = __nccwpck_require__(53776); +const base64_js_1 = __nccwpck_require__(23442); +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. + * + * @internal + */ +function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +exports.parseCAEChallenge = parseCAEChallenge; +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). 
+ * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. + * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || log_js_1.logger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map - return buf; - } +/***/ }), - return (0, _stringify.unsafeStringify)(rnds); +/***/ 16576: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnTenantChallenge = void 0; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. 
+ **/ +const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. + */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); } +/** + * Extracts the options form a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map -var _default = v4; -exports["default"] = _default; +/***/ }), + +/***/ 23442: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
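// For reference, a small self-contained sketch (not part of the bundle above) of the
// Bearer-challenge parsing that authorizeRequestOnTenantChallenge relies on: split the
// WWW-Authenticate value into key/value pairs, read the tenant id from the first path
// segment of authorization_uri, and build the scope from resource_id plus "/.default".
// The challenge string and tenant GUID below are illustrative placeholders, not values
// taken from this diff.
const wwwAuthenticate =
  "Bearer authorization_uri=https://login.microsoftonline.com/00000000-1111-2222-3333-444444444444/oauth2/authorize resource_id=https://storage.azure.com";

const pairs = wwwAuthenticate
  .slice("Bearer ".length)
  .trim()
  .split(" ")
  .filter((part) => part)
  .map((part) => part.split("="))
  .reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {});

const tenantId = new URL(pairs.authorization_uri).pathname.split("/")[1];
const scopeUrl = new URL(pairs.resource_id);
scopeUrl.pathname = "/.default"; // the bundled buildScopes special-cases https://disk.azure.com with a double slash

console.log(tenantId);            // "00000000-1111-2222-3333-444444444444"
console.log(scopeUrl.toString()); // "https://storage.azure.com/.default"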
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decodeStringToString = exports.decodeString = exports.encodeByteArray = exports.encodeString = void 0; +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +exports.encodeString = encodeString; +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Aray to encode + * @internal + */ +function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +exports.encodeByteArray = encodeByteArray; +/** + * Decodes a base64 string into a byte array. + * @param value - the base64 string to decode + * @internal + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} +exports.decodeString = decodeString; +/** + * Decodes a base64 string into a string. + * @param value - the base64 string to decode + * @internal + */ +function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); +} +exports.decodeStringToString = decodeStringToString; +//# sourceMappingURL=base64.js.map /***/ }), -/***/ 9120: +/***/ 25315: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deserializationPolicy = exports.deserializationPolicyName = void 0; +const interfaces_js_1 = __nccwpck_require__(8153); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const serializer_js_1 = __nccwpck_require__(63566); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +exports.deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? 
_g : interfaces_js_1.XML_CHARKEY, + }, + }; + return { + name: exports.deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +exports.deserializationPolicy = deserializationPolicy; +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new core_rest_pipeline_1.RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. 
+ // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; + const e = new core_rest_pipeline_1.RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(5998)); +/***/ }), -var _sha = _interopRequireDefault(__nccwpck_require__(5274)); +/***/ 30308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +"use strict"; -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCachedDefaultHttpClient = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return cachedHttpClient; +} +exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient; +//# sourceMappingURL=httpClientCache.js.map /***/ }), -/***/ 6900: +/***/ 7611: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0; +var serializer_js_1 = __nccwpck_require__(63566); +Object.defineProperty(exports, "createSerializer", ({ enumerable: true, get: function () { return serializer_js_1.createSerializer; } })); +Object.defineProperty(exports, "MapperTypeNames", ({ enumerable: true, get: function () { return serializer_js_1.MapperTypeNames; } })); +var serviceClient_js_1 = __nccwpck_require__(28927); +Object.defineProperty(exports, "ServiceClient", ({ enumerable: true, get: function () { return serviceClient_js_1.ServiceClient; } })); +var pipeline_js_1 = __nccwpck_require__(33924); +Object.defineProperty(exports, "createClientPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createClientPipeline; } })); +var interfaces_js_1 = __nccwpck_require__(8153); +Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_ATTRKEY; } })); +Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_CHARKEY; } })); +var deserializationPolicy_js_1 = __nccwpck_require__(25315); +Object.defineProperty(exports, "deserializationPolicy", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicy; } })); +Object.defineProperty(exports, "deserializationPolicyName", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicyName; } })); +var serializationPolicy_js_1 = __nccwpck_require__(96625); +Object.defineProperty(exports, "serializationPolicy", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicy; } })); +Object.defineProperty(exports, "serializationPolicyName", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicyName; } })); +var authorizeRequestOnClaimChallenge_js_1 = __nccwpck_require__(94873); +Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } })); +var authorizeRequestOnTenantChallenge_js_1 = __nccwpck_require__(16576); +Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", ({ 
enumerable: true, get: function () { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } })); +//# sourceMappingURL=index.js.map -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ }), -var _regex = _interopRequireDefault(__nccwpck_require__(814)); +/***/ 41459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +"use strict"; -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPathStringFromParameter = exports.getStreamingResponseStatusCodes = void 0; +const serializer_js_1 = __nccwpck_require__(63566); +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; } +exports.getPathStringFromParameter = getPathStringFromParameter; +//# sourceMappingURL=interfaceHelpers.js.map -var _default = validate; -exports["default"] = _default; +/***/ }), + +/***/ 8153: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map /***/ }), -/***/ 2414: +/***/ 53776: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +exports.logger = (0, logger_1.createClientLogger)("core-client"); +//# sourceMappingURL=log.js.map -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +/***/ 62074: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +"use strict"; - return parseInt(uuid.slice(14, 15), 16); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOperationRequestInfo = exports.getOperationArgumentValueFromParameter = void 0; +const state_js_1 = __nccwpck_require__(35429); +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. + */ +function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
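// Illustrative walk-through: with parent = { options: { timeout: 30 } } and parameterPath = ["options", "timeout"],
// this loop descends to 30 and the function reports { propertyFound: true, propertyValue: 30 }.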
+ if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state_js_1.state.operationRequestMap.get(request); + if (!info) { + info = {}; + state_js_1.state.operationRequestMap.set(request, info); + } + return info; } +exports.getOperationRequestInfo = getOperationRequestInfo; +//# sourceMappingURL=operationHelpers.js.map -var _default = version; -exports["default"] = _default; +/***/ }), + +/***/ 33924: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createClientPipeline = void 0; +const deserializationPolicy_js_1 = __nccwpck_require__(25315); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const serializationPolicy_js_1 = __nccwpck_require__(96625); +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. + * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. + */ +function createClientPipeline(options = {}) { + const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +exports.createClientPipeline = createClientPipeline; +//# sourceMappingURL=pipeline.js.map /***/ }), -/***/ 4886: -/***/ ((module) => { +/***/ 96625: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -var conversions = {}; -module.exports = conversions; - -function sign(x) { - return x < 0 ? -1 : 1; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializeRequestBody = exports.serializeHeaders = exports.serializationPolicy = exports.serializationPolicyName = void 0; +const interfaces_js_1 = __nccwpck_require__(8153); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const serializer_js_1 = __nccwpck_require__(63566); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +/** + * The programmatic identifier of the serializationPolicy. + */ +exports.serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. 
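 * For example (illustrative), createClientPipeline in this bundle registers it with
 * pipeline.addPolicy(serializationPolicy(options.serializationOptions), { phase: "Serialize" }).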
+ */ +function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: exports.serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; } - -function evenRound(x) { - // Round x to the nearest integer, choosing the even integer if it lies halfway between two. - if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) - return Math.floor(x); - } else { - return Math.round(x); +exports.serializationPolicy = serializationPolicy; +/** + * @internal + */ +function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); + } + } + } + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } } } - -function createNumberConversion(bitLength, typeOpts) { - if (!typeOpts.unsigned) { - --bitLength; +exports.serializeHeaders = serializeHeaders; +/** + * @internal + */ +function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? 
_d : interfaces_js_1.XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === serializer_js_1.MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === serializer_js_1.MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } } - const lowerBound = typeOpts.unsigned ? 
0 : -Math.pow(2, bitLength); - const upperBound = Math.pow(2, bitLength) - 1; + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); + } + } + } +} +exports.serializeRequestBody = serializeRequestBody; +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map - const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); - const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); +/***/ }), - return function(V, opts) { - if (!opts) opts = {}; +/***/ 63566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let x = +V; +"use strict"; - if (opts.enforceRange) { - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite number"); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MapperTypeNames = exports.createSerializer = void 0; +const tslib_1 = __nccwpck_require__(4351); +const base64 = tslib_1.__importStar(__nccwpck_require__(23442)); +const interfaces_js_1 = __nccwpck_require__(8153); +const utils_js_1 = __nccwpck_require__(25363); +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); } - - x = sign(x) * Math.floor(Math.abs(x)); - if (x < lowerBound || x > upperBound) { - throw new TypeError("Argument is not in byte range"); + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); } - - return x; } - - if (!isNaN(x) && opts.clamp) { - x = evenRound(x); - - if (x < lowerBound) x = lowerBound; - if (x > upperBound) x = upperBound; - return x; + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; } - - if (!Number.isFinite(x) || x === 0) { - return 0; + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } } - - x = sign(x) * Math.floor(Math.abs(x)); - x = x % moduloVal; - - if (!typeOpts.unsigned && x >= moduloBound) { - return x - moduloVal; - } else if (typeOpts.unsigned) { - if (x < 0) { - x += moduloVal; - } else if (x === -0) { // don't return negative zero - return 0; + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; } - - return x; + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[interfaces_js_1.XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; } } - -conversions["void"] = function () { - return undefined; -}; - -conversions["boolean"] = function (val) { - return !!val; -}; - -conversions["byte"] = createNumberConversion(8, { unsigned: false }); -conversions["octet"] = createNumberConversion(8, { unsigned: true }); - -conversions["short"] = createNumberConversion(16, { unsigned: false }); -conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); - -conversions["long"] = createNumberConversion(32, { unsigned: false }); -conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); - -conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); -conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); - -conversions["double"] = function (V) { - const x = +V; - - if (!Number.isFinite(x)) { - throw new TypeError("Argument is not a finite floating-point value"); +/** + * Method that creates and returns a Serializer. 
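 * For example (illustrative; myModelMappers stands in for a generated mappers object):
 *   const serializer = createSerializer(myModelMappers, false);
 *   serializer.serialize(myModelMappers.Widget, { name: "a" }, "widget");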
+ * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +exports.createSerializer = createSerializer; +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; } - - return x; -}; - -conversions["unrestricted double"] = function (V) { - const x = +V; - - if (isNaN(x)) { - throw new TypeError("Argument is NaN"); + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; } - - return x; -}; - -// not quite valid, but good enough for JS -conversions["float"] = conversions["double"]; -conversions["unrestricted float"] = conversions["unrestricted double"]; - -conversions["DOMString"] = function (V, opts) { - if (!opts) opts = {}; - - if (opts.treatNullAsEmptyString && V === null) { - return ""; + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); } - - return String(V); -}; - -conversions["ByteString"] = function (V, opts) { - const x = String(V); - let c = undefined; - for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { - if (c > 255) { - throw new TypeError("Argument is not a valid bytestring"); + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
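// e.g. (illustrative) the Base64Url input "-_8" has been rewritten to "+/8" above and decodes to the bytes [0xfb, 0xff]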
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } } } - - return x; -}; - -conversions["USVString"] = function (V) { - const S = String(V); - const n = S.length; - const U = []; - for (let i = 0; i < n; ++i) { - const c = S.charCodeAt(i); - if (c < 0xD800 || c > 0xDFFF) { - U.push(String.fromCodePoint(c)); - } else if (0xDC00 <= c && c <= 0xDFFF) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - if (i === n - 1) { - U.push(String.fromCodePoint(0xFFFD)); - } else { - const d = S.charCodeAt(i + 1); - if (0xDC00 <= d && d <= 0xDFFF) { - const a = c & 0x3FF; - const b = d & 0x3FF; - U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); - ++i; - } else { - U.push(String.fromCodePoint(0xFFFD)); + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!(0, utils_js_1.isDuration)(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? 
`xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[interfaces_js_1.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
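// Illustrative: an attribute property with xmlName "id" and serialized value "123" ends up as { $: { id: "123" } } on the parent object.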
+ parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; + parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); } } } + return payload; } - - return U.join(''); -}; - -conversions["Date"] = function (V, opts) { - if (!(V instanceof Date)) { - throw new TypeError("Argument is not a Date object"); + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; } - if (isNaN(V)) { - return undefined; + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; + } } - - return V; -}; - -conversions["RegExp"] = function (V, opts) { - if (!(V instanceof RegExp)) { - V = new RegExp(V); + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? _a : interfaces_js_1.XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of <ElementName> wrapped by <XmlName> + For the xml example below + <Cors> + <CorsRule>...</CorsRule> + <CorsRule>...</CorsRule> + </Cors> + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
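// Illustrative: if the discriminator property (clientName, e.g. "fishtype") is missing from the response body,
// it is backfilled below from the serializedName of the already-selected polymorphic mapper.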
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } } - - return V; + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? 
_a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? _a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +exports.MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", }; - +//# sourceMappingURL=serializer.js.map /***/ }), -/***/ 7537: +/***/ 28927: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const usm = __nccwpck_require__(2158); - -exports.implementation = class URLImpl { - constructor(constructorArgs) { - const url = constructorArgs[0]; - const base = constructorArgs[1]; - - let parsedBase = null; - if (base !== undefined) { - parsedBase = usm.basicURLParse(base); - if (parsedBase === "failure") { - throw new TypeError("Invalid base URL"); - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
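The module below is @azure/core-client's ServiceClient. One detail worth noting for reviewers is how user-supplied additionalPolicies are positioned in the pipeline; a simplified sketch of that mapping, using a stub pipeline object that only records calls (not the real Pipeline from core-rest-pipeline):

```js
// Stub pipeline that records addPolicy calls instead of sending requests.
const recorded = [];
const pipeline = {
  addPolicy: (policy, options) => recorded.push([policy.name, options.afterPhase])
};

// Mapping done in the constructor below: "perRetry" policies are anchored after the
// "Sign" phase (which itself runs after Retry), while "perCall" policies get no afterPhase.
const additionalPolicies = [
  { policy: { name: "telemetry" }, position: "perCall" },
  { policy: { name: "reSign" }, position: "perRetry" }
];
for (const { policy, position } of additionalPolicies) {
  const afterPhase = position === "perRetry" ? "Sign" : undefined;
  pipeline.addPolicy(policy, { afterPhase });
}

console.log(recorded); // [ [ 'telemetry', undefined ], [ 'reSign', 'Sign' ] ]
```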
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceClient = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const pipeline_js_1 = __nccwpck_require__(33924); +const utils_js_1 = __nccwpck_require__(25363); +const httpClientCache_js_1 = __nccwpck_require__(30308); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const urlHelpers_js_1 = __nccwpck_require__(98258); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +const log_js_1 = __nccwpck_require__(53776); +/** + * Initializes a new instance of the ServiceClient. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. + */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. 
+ */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this); + const request = (0, core_rest_pipeline_1.createPipelineRequest)({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if (options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } } - - const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); +} +exports.ServiceClient = ServiceClient; +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? 
{ credentialScopes, credential: options.credential } + : undefined; + return (0, pipeline_js_1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; } - - this._url = parsedURL; - - // TODO: query stuff - } - - get href() { - return usm.serializeURL(this._url); - } - - set href(v) { - const parsedURL = usm.basicURLParse(v); - if (parsedURL === "failure") { - throw new TypeError("Invalid URL"); + if (options.endpoint) { + return `${options.endpoint}/.default`; } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map - this._url = parsedURL; - } - - get origin() { - return usm.serializeURLOrigin(this._url); - } +/***/ }), - get protocol() { - return this._url.scheme + ":"; - } +/***/ 35429: +/***/ ((__unused_webpack_module, exports) => { - set protocol(v) { - usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); - } +"use strict"; - get username() { - return this._url.username; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.state = void 0; +/** + * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports. + */ +exports.state = { + operationRequestMap: new WeakMap(), +}; +//# sourceMappingURL=state-cjs.cjs.map - set username(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; - } +/***/ }), - usm.setTheUsername(this._url, v); - } +/***/ 98258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - get password() { - return this._url.password; - } +"use strict"; - set password(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.appendQueryParams = exports.getRequestUrl = void 0; +const operationHelpers_js_1 = __nccwpck_require__(62074); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. 
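Both quirks can be seen in isolation with a small sketch: an absolute {nextLink} value wins over the base URI, while a relative path is appended to it. The helpers here are simplified stand-ins, not the bundle's own isAbsoluteUrl/appendPath:

```js
// Simplified stand-ins (names reused for readability only).
const isAbsoluteUrl = (path) => path.includes("://");

function resolveRequestUrl(baseUri, path) {
  if (isAbsoluteUrl(path)) {
    return path; // the QUIRK above: a fully formed {nextLink} ignores the baseUri
  }
  const url = new URL(baseUri);
  url.pathname = url.pathname.replace(/\/?$/, "/") + path.replace(/^\//, "");
  return url.toString();
}

console.log(resolveRequestUrl("https://example.org/api", "widgets"));
// https://example.org/api/widgets
console.log(resolveRequestUrl("https://example.org/api", "https://example.org/api/widgets?page=2"));
// https://example.org/api/widgets?page=2  (baseUri ignored)
```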
+ if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } } - - usm.setThePassword(this._url, v); - } - - get host() { - const url = this._url; - - if (url.host === null) { - return ""; + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. + */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +exports.getRequestUrl = getRequestUrl; +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); } - - if (url.port === null) { - return usm.serializeHost(url.host); + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); + const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } } - - return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); - } - - set host(v) { - if (this._url.cannotBeABaseURL) { - return; + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; } - - usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); - } - - get hostname() { - if (this._url.host === null) { - return ""; + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; } - - return usm.serializeHost(this._url.host); - } - - set hostname(v) { - if (this._url.cannotBeABaseURL) { - return; + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); } - - usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); - } - - get port() { - if (this._url.port === null) { - return ""; + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; + } } - - return usm.serializeInteger(this._url.port); - } - - set port(v) { - if (usm.cannotHaveAUsernamePasswordPort(this._url)) { - return; + else { + newPath = newPath + pathToAppend; } - - if (v === "") { - this._url.port = null; - } else { - usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. + if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); + } + } } - } - - get pathname() { - if (this._url.cannotBeABaseURL) { - return this._url.path[0]; + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; } - - if (this._url.path.length === 0) { - return ""; + // remove the leading ? 
+ queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } } - - return "/" + this._url.path.join("/"); - } - - set pathname(v) { - if (this._url.cannotBeABaseURL) { - return; + return result; +} +/** @internal */ +function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; } - - this._url.path = []; - usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); - } - - get search() { - if (this._url.query === null || this._url.query === "") { - return ""; + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. + // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } } - - return "?" + this._url.query; - } - - set search(v) { - // TODO: query stuff - - const url = this._url; - - if (v === "") { - url.query = null; - return; + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } } + // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +exports.appendQueryParams = appendQueryParams; +//# sourceMappingURL=urlHelpers.js.map - const input = v[0] === "?" ? v.substring(1) : v; - url.query = ""; - usm.basicURLParse(input, { url, stateOverride: "query" }); - } +/***/ }), - get hash() { - if (this._url.fragment === null || this._url.fragment === "") { - return ""; - } +/***/ 25363: +/***/ ((__unused_webpack_module, exports) => { - return "#" + this._url.fragment; - } +"use strict"; - set hash(v) { - if (v === "") { - this._url.fragment = null; - return; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.flattenResponse = exports.isValidUuid = exports.isDuration = exports.isPrimitiveBody = void 0; +/** + * A type guard for a primitive response body. 
+ * @param value - Value to test + * + * @internal + */ +function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +exports.isPrimitiveBody = isPrimitiveBody; +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +function isDuration(value) { + return validateISODuration.test(value); +} +exports.isDuration = isDuration; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. + * + * @internal + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +exports.isValidUuid = isValidUuid; +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. + * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; } - - const input = v[0] === "#" ? v.substring(1) : v; - this._url.fragment = ""; - usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); - } - - toJSON() { - return this.href; - } -}; - + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? 
void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +exports.flattenResponse = flattenResponse; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 3394: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35064: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExtendedServiceClient = void 0; +const disableKeepAlivePolicy_js_1 = __nccwpck_require__(66899); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const core_client_1 = __nccwpck_require__(7611); +const response_js_1 = __nccwpck_require__(49283); +/** + * Client to provide compatability between core V1 & V2. + */ +class ExtendedServiceClient extends core_client_1.ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { + this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: core_rest_pipeline_1.redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. + * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: (0, response_js_1.toCompatResponse)(lastResponse), + }); + } + return result; + } +} +exports.ExtendedServiceClient = ExtendedServiceClient; +//# sourceMappingURL=extendedClient.js.map -const conversions = __nccwpck_require__(4886); -const utils = __nccwpck_require__(3185); -const Impl = __nccwpck_require__(7537); +/***/ }), -const impl = utils.implSymbol; +/***/ 23256: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function URL(url) { - if (!this || this[impl] || !(this instanceof URL)) { - throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); - } - if (arguments.length < 1) { - throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); - } - const args = []; - for (let i = 0; i < arguments.length && i < 2; ++i) { - args[i] = arguments[i]; - } - args[0] = conversions["USVString"](args[0]); - if (args[1] !== undefined) { - args[1] = conversions["USVString"](args[1]); - } +"use strict"; - module.exports.setup(this, args); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertHttpClient = void 0; +const response_js_1 = __nccwpck_require__(49283); +const util_js_1 = __nccwpck_require__(43732); +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
+ * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request, { createProxy: true })); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; } +exports.convertHttpClient = convertHttpClient; +//# sourceMappingURL=httpClientAdapter.js.map -URL.prototype.toJSON = function toJSON() { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - const args = []; - for (let i = 0; i < arguments.length && i < 0; ++i) { - args[i] = arguments[i]; - } - return this[impl].toJSON.apply(this[impl], args); -}; -Object.defineProperty(URL.prototype, "href", { - get() { - return this[impl].href; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].href = V; - }, - enumerable: true, - configurable: true -}); - -URL.prototype.toString = function () { - if (!this || !module.exports.is(this)) { - throw new TypeError("Illegal invocation"); - } - return this.href; -}; - -Object.defineProperty(URL.prototype, "origin", { - get() { - return this[impl].origin; - }, - enumerable: true, - configurable: true -}); - -Object.defineProperty(URL.prototype, "protocol", { - get() { - return this[impl].protocol; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].protocol = V; - }, - enumerable: true, - configurable: true -}); +/***/ }), -Object.defineProperty(URL.prototype, "username", { - get() { - return this[impl].username; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].username = V; - }, - enumerable: true, - configurable: true -}); +/***/ 25083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -Object.defineProperty(URL.prototype, "password", { - get() { - return this[impl].password; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].password = V; - }, - enumerable: true, - configurable: true -}); +"use strict"; -Object.defineProperty(URL.prototype, "host", { - get() { - return this[impl].host; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].host = V; - }, - enumerable: true, - configurable: true -}); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0; +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. 
+ * + * @packageDocumentation + */ +var extendedClient_js_1 = __nccwpck_require__(35064); +Object.defineProperty(exports, "ExtendedServiceClient", ({ enumerable: true, get: function () { return extendedClient_js_1.ExtendedServiceClient; } })); +var requestPolicyFactoryPolicy_js_1 = __nccwpck_require__(98241); +Object.defineProperty(exports, "requestPolicyFactoryPolicyName", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } })); +Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } })); +Object.defineProperty(exports, "HttpPipelineLogLevel", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } })); +var disableKeepAlivePolicy_js_1 = __nccwpck_require__(66899); +Object.defineProperty(exports, "disableKeepAlivePolicyName", ({ enumerable: true, get: function () { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } })); +var httpClientAdapter_js_1 = __nccwpck_require__(23256); +Object.defineProperty(exports, "convertHttpClient", ({ enumerable: true, get: function () { return httpClientAdapter_js_1.convertHttpClient; } })); +var util_js_1 = __nccwpck_require__(43732); +Object.defineProperty(exports, "toHttpHeadersLike", ({ enumerable: true, get: function () { return util_js_1.toHttpHeadersLike; } })); +//# sourceMappingURL=index.js.map -Object.defineProperty(URL.prototype, "hostname", { - get() { - return this[impl].hostname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hostname = V; - }, - enumerable: true, - configurable: true -}); +/***/ }), -Object.defineProperty(URL.prototype, "port", { - get() { - return this[impl].port; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].port = V; - }, - enumerable: true, - configurable: true -}); +/***/ 66899: +/***/ ((__unused_webpack_module, exports) => { -Object.defineProperty(URL.prototype, "pathname", { - get() { - return this[impl].pathname; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].pathname = V; - }, - enumerable: true, - configurable: true -}); +"use strict"; -Object.defineProperty(URL.prototype, "search", { - get() { - return this[impl].search; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].search = V; - }, - enumerable: true, - configurable: true -}); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
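The compat policies in this layer, including the disable-keep-alive policy defined below, all follow core-rest-pipeline's minimal policy contract: an object with a name and a sendRequest(request, next) method. A sketch of a hypothetical policy in that shape, exercised against stub objects (the policy name and the Map standing in for HttpHeaders are illustrative only):

```js
// A hypothetical policy: stamp a header, then delegate to the rest of the pipeline via `next`.
const stampHeaderPolicy = {
  name: "stampHeaderPolicy",
  async sendRequest(request, next) {
    request.headers.set("x-example-stamp", "1");
    return next(request);
  }
};

// Exercise it with stub request/next objects.
const stubRequest = { url: "https://example.org", headers: new Map() };
stampHeaderPolicy
  .sendRequest(stubRequest, async (req) => ({ status: 200, request: req }))
  .then((res) => console.log(res.status, res.request.headers.get("x-example-stamp"))); // 200 '1'
```

createDisableKeepAlivePolicy below follows the same pattern, except that it sets request.disableKeepAlive = true instead of touching headers.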
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pipelineContainsDisableKeepAlivePolicy = exports.createDisableKeepAlivePolicy = exports.disableKeepAlivePolicyName = void 0; +exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +function createDisableKeepAlivePolicy() { + return { + name: exports.disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; +/** + * @internal + */ +function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName); +} +exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; +//# sourceMappingURL=disableKeepAlivePolicy.js.map -Object.defineProperty(URL.prototype, "hash", { - get() { - return this[impl].hash; - }, - set(V) { - V = conversions["USVString"](V); - this[impl].hash = V; - }, - enumerable: true, - configurable: true -}); +/***/ }), +/***/ 98241: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = { - is(obj) { - return !!obj && obj[impl] instanceof Impl.implementation; - }, - create(constructorArgs, privateData) { - let obj = Object.create(URL.prototype); - this.setup(obj, constructorArgs, privateData); - return obj; - }, - setup(obj, constructorArgs, privateData) { - if (!privateData) privateData = {}; - privateData.wrapper = obj; +"use strict"; - obj[impl] = new Impl.implementation(constructorArgs, privateData); - obj[impl][utils.wrapperSymbol] = obj; - }, - interface: URL, - expose: { - Window: { URL: URL }, - Worker: { URL: URL } - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0; +const util_js_1 = __nccwpck_require__(43732); +const response_js_1 = __nccwpck_require__(49283); +/** + * An enum for compatibility with RequestPolicy + */ +var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, }; - - +/** + * The name of the RequestPolicyFactoryPolicy + */ +exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. 
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: exports.requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next((0, util_js_1.toPipelineRequest)(httpRequest)); + return (0, response_js_1.toCompatResponse)(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = (0, util_js_1.toWebResourceLike)(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; +} +exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map /***/ }), -/***/ 8665: +/***/ 49283: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -exports.URL = __nccwpck_require__(3394)["interface"]; -exports.serializeURL = __nccwpck_require__(2158).serializeURL; -exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; -exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; -exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; -exports.setThePassword = __nccwpck_require__(2158).setThePassword; -exports.serializeHost = __nccwpck_require__(2158).serializeHost; -exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; -exports.parseURL = __nccwpck_require__(2158).parseURL; - +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPipelineResponse = exports.toCompatResponse = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const util_js_1 = __nccwpck_require__(43732); +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. + */ +function toCompatResponse(response, options) { + let request = (0, util_js_1.toWebResourceLike)(response.request); + let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +exports.toCompatResponse = toCompatResponse; +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. 
+ */ +function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1.toPipelineRequest)(compatResponse.request) }); + } +} +exports.toPipelineResponse = toPipelineResponse; +//# sourceMappingURL=response.js.map /***/ }), -/***/ 2158: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 43732: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -const punycode = __nccwpck_require__(5477); -const tr46 = __nccwpck_require__(4256); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpHeaders = exports.toHttpHeadersLike = exports.toWebResourceLike = exports.toPipelineRequest = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. +const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. +const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +exports.toPipelineRequest = toPipelineRequest; +function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +exports.toWebResourceLike = toWebResourceLike; +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders from core-http. + * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +exports.toHttpHeadersLike = toHttpHeadersLike; +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. 
+ */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +exports.HttpHeaders = HttpHeaders; +//# sourceMappingURL=util.js.map -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; +/***/ }), -const failure = Symbol("failure"); +/***/ 57759: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} +"use strict"; -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? undefined : String.fromCodePoint(c); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0; +const operation_js_1 = __nccwpck_require__(70281); +const logger_js_1 = __nccwpck_require__(28121); +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; } - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; } - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; } - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; } - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +function findResourceLocation(inputs) { + var _a; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } } - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; +function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } } - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +exports.inferLroMode = inferLroMode; +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status === null || status === void 0 ? 
void 0 : status.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); } - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); } - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } } - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; } - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +exports.parseRetryAfter = parseRetryAfter; +function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; } - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +exports.getErrorFromResponse = getErrorFromResponse; +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; } - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; +function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; } - -function isSpecial(url) { - return isSpecialScheme(url.scheme); +exports.getStatusFromInitialResponse = getStatusFromInitialResponse; +/** + * Initiates the long-running operation. + */ +async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); + }, + stateProxy, + processResult: processResult + ? ({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); } - -function defaultPort(scheme) { - return specialSchemes[scheme]; +exports.initHttpOperation = initHttpOperation; +function getOperationLocation({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +exports.getOperationLocation = getOperationLocation; +function getOperationStatus({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +exports.getOperationStatus = getOperationStatus; +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; +} +function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +exports.getResourceLocation = getResourceLocation; +function isOperationError(e) { + return e.name === "RestError"; +} +exports.isOperationError = isOperationError; +/** Polls the long-running operation. 
*/ +async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); } +exports.pollHttpOperation = pollHttpOperation; +//# sourceMappingURL=operation.js.map -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } +/***/ }), - return "%" + hex; +/***/ 78412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpPoller = void 0; +const operation_js_1 = __nccwpck_require__(57759); +const poller_js_1 = __nccwpck_require__(76713); +/** + * Creates a poller that can be used to poll a long-running operation. + * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? 
({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); } +exports.createHttpPoller = createHttpPoller; +//# sourceMappingURL=poller.js.map -function utf8PercentEncode(c) { - const buf = new Buffer(c); +/***/ }), - let str = ""; +/***/ 90334: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } +"use strict"; - return str; -} +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpPoller = void 0; +const tslib_1 = __nccwpck_require__(4351); +var poller_js_1 = __nccwpck_require__(78412); +Object.defineProperty(exports, "createHttpPoller", ({ enumerable: true, get: function () { return poller_js_1.createHttpPoller; } })); +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +tslib_1.__exportStar(__nccwpck_require__(52260), exports); +tslib_1.__exportStar(__nccwpck_require__(17270), exports); +tslib_1.__exportStar(__nccwpck_require__(93586), exports); +//# sourceMappingURL=index.js.map -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); +/***/ }), + +/***/ 52260: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LroEngine = void 0; +var lroEngine_js_1 = __nccwpck_require__(35780); +Object.defineProperty(exports, "LroEngine", ({ enumerable: true, get: function () { return lroEngine_js_1.LroEngine; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 35780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LroEngine = void 0; +const operation_js_1 = __nccwpck_require__(77954); +const constants_js_1 = __nccwpck_require__(53846); +const poller_js_1 = __nccwpck_require__(17270); +const operation_js_2 = __nccwpck_require__(70281); +/** + * The LRO Engine, a class that performs polling. + */ +class LroEngine extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? 
(0, operation_js_2.deserializeState)(resumeFrom) + : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); } - } - return new Buffer(output).toString(); } +exports.LroEngine = LroEngine; +//# sourceMappingURL=lroEngine.js.map -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} +/***/ }), -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} +/***/ 77954: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GenericPollOperation = void 0; +const operation_js_1 = __nccwpck_require__(57759); +const logger_js_1 = __nccwpck_require__(28121); +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +class GenericPollOperation { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + var _a; + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), (await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? 
({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); + return this; + } + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } } +exports.GenericPollOperation = GenericPollOperation; +//# sourceMappingURL=operation.js.map -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); +/***/ }), - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } +/***/ 93586: +/***/ ((__unused_webpack_module, exports) => { - return cStr; -} +"use strict"; -function parseIPv4Number(input) { - let R = 10; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=pollOperation.js.map + +/***/ }), + +/***/ 17270: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +exports.PollerStoppedError = PollerStoppedError; +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +exports.PollerCancelledError = PollerCancelledError; +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. 
+ * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. 
+ */ + constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. */ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. 
To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... 
+ * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString() { + return this.operation.toString(); + } +} +exports.Poller = Poller; +//# sourceMappingURL=poller.js.map - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } +/***/ }), - if (input === "") { - return 0; - } +/***/ 28121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } +"use strict"; - return parseInt(input, R); -} +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +exports.logger = (0, logger_1.createClientLogger)("core-lro"); +//# sourceMappingURL=logger.js.map -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } +/***/ }), - if (parts.length > 4) { - return input; - } +/***/ 53846: +/***/ ((__unused_webpack_module, exports) => { - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } +"use strict"; - numbers.push(n); - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0; +/** + * The default time interval to wait before sending the next polling request. + */ +exports.POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. 
+ */ +exports.terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } +/***/ }), - let ipv4 = numbers.pop(); - let counter = 0; +/***/ 70281: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } +"use strict"; - return ipv4; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pollOperation = exports.initOperation = exports.deserializeState = void 0; +const logger_js_1 = __nccwpck_require__(28121); +const constants_js_1 = __nccwpck_require__(53846); +/** + * Deserializes the state + */ +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } } - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." + output; +exports.deserializeState = deserializeState; +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; } - n = Math.floor(n / 256); - } - - return output; + return message + " " + innerMessage; } - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; + if ((isDone === null || isDone === void 0 ? 
void 0 : isDone(response, state)) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. + */ +async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +exports.initOperation = initOperation; +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } } + return { response, status }; +} +/** Polls the long-running operation. */ +async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? 
void 0 : getOperationLocation(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } + else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + } + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); + } +} +exports.pollOperation = pollOperation; +//# sourceMappingURL=operation.js.map - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; +/***/ }), - if (pieceIndex > 6) { - return failure; - } +/***/ 76713: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let numbersSeen = 0; +"use strict"; - while (input[pointer] !== undefined) { - let ipv4Piece = null; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildCreatePoller = void 0; +const operation_js_1 = __nccwpck_require__(70281); +const constants_js_1 = __nccwpck_require__(53846); +const core_util_1 = __nccwpck_require__(80637); +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? 
(0, operation_js_1.deserializeState)(restoreFrom) + : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + }))), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +exports.buildCreatePoller = buildCreatePoller; +//# sourceMappingURL=poller.js.map - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } +/***/ }), - if (!isASCIIDigit(input[pointer])) { - return failure; - } +/***/ 43171: +/***/ ((__unused_webpack_module, exports) => { - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } +"use strict"; - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; +exports.SDK_VERSION = "1.18.0"; +exports.DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map - ++numbersSeen; +/***/ }), - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } +/***/ 81060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (numbersSeen !== 4) { - return failure; - } +"use strict"; - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createPipelineFromOptions = createPipelineFromOptions; +const logPolicy_js_1 = __nccwpck_require__(46821); +const pipeline_js_1 = __nccwpck_require__(83906); +const redirectPolicy_js_1 = __nccwpck_require__(98526); +const userAgentPolicy_js_1 = __nccwpck_require__(88935); +const multipartPolicy_js_1 = __nccwpck_require__(19042); +const decompressResponsePolicy_js_1 = __nccwpck_require__(57618); +const defaultRetryPolicy_js_1 = __nccwpck_require__(48549); +const formDataPolicy_js_1 = __nccwpck_require__(16501); +const core_util_1 = __nccwpck_require__(80637); +const proxyPolicy_js_1 = __nccwpck_require__(94761); +const setClientRequestIdPolicy_js_1 = __nccwpck_require__(93860); +const tlsPolicy_js_1 = __nccwpck_require__(88446); +const tracingPolicy_js_1 = __nccwpck_require__(80606); +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. + */ +function createPipelineFromOptions(options) { + var _a; + const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); + if (core_util_1.isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); + } + pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); + pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); + } + pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); + pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); + pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). + pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); + pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)(Object.assign(Object.assign({}, options.userAgentOptions), options.loggingOptions)), { + afterPhase: "Retry", + }); + if (core_util_1.isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. 
+ pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); } + pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +//# sourceMappingURL=createPipelineFromOptions.js.map - address[pieceIndex] = value; - ++pieceIndex; - } +/***/ }), - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } +/***/ 88609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return address; -} +"use strict"; -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - const compress = seqResult.idx; - let ignore0 = false; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultHttpClient = createDefaultHttpClient; +const nodeHttpClient_js_1 = __nccwpck_require__(49463); +/** + * Create the correct HttpClient for the current environment. + */ +function createDefaultHttpClient() { + return (0, nodeHttpClient_js_1.createNodeHttpClient)(); +} +//# sourceMappingURL=defaultHttpClient.js.map - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } +/***/ }), - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } +/***/ 60118: +/***/ ((__unused_webpack_module, exports) => { - output += address[pieceIndex].toString(16); +"use strict"; - if (pieceIndex !== 7) { - output += ":"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpHeaders = createHttpHeaders; +function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; } - } - - return output; } - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. 
+ * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +//# sourceMappingURL=httpHeaders.js.map - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } +/***/ }), - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } +/***/ 29146: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } +"use strict"; - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createFileFromStream = exports.createFile = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0; +var pipeline_js_1 = __nccwpck_require__(83906); +Object.defineProperty(exports, "createEmptyPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createEmptyPipeline; } })); +var createPipelineFromOptions_js_1 = __nccwpck_require__(81060); +Object.defineProperty(exports, "createPipelineFromOptions", ({ enumerable: true, get: function () { return createPipelineFromOptions_js_1.createPipelineFromOptions; } })); +var defaultHttpClient_js_1 = __nccwpck_require__(88609); +Object.defineProperty(exports, "createDefaultHttpClient", ({ enumerable: true, get: function () { return defaultHttpClient_js_1.createDefaultHttpClient; } })); +var httpHeaders_js_1 = __nccwpck_require__(60118); +Object.defineProperty(exports, "createHttpHeaders", ({ enumerable: true, get: function () { return httpHeaders_js_1.createHttpHeaders; } })); +var pipelineRequest_js_1 = __nccwpck_require__(93536); +Object.defineProperty(exports, "createPipelineRequest", ({ enumerable: true, get: function () { return pipelineRequest_js_1.createPipelineRequest; } })); +var restError_js_1 = __nccwpck_require__(61036); +Object.defineProperty(exports, "RestError", ({ enumerable: true, get: function () { return restError_js_1.RestError; } })); +Object.defineProperty(exports, "isRestError", ({ enumerable: true, get: function () { return restError_js_1.isRestError; } })); +var decompressResponsePolicy_js_1 = __nccwpck_require__(57618); +Object.defineProperty(exports, "decompressResponsePolicy", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicy; } })); +Object.defineProperty(exports, "decompressResponsePolicyName", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } })); +var exponentialRetryPolicy_js_1 = __nccwpck_require__(1598); +Object.defineProperty(exports, "exponentialRetryPolicy", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } })); +Object.defineProperty(exports, "exponentialRetryPolicyName", ({ enumerable: true, get: function () { return 
exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } })); +var setClientRequestIdPolicy_js_1 = __nccwpck_require__(93860); +Object.defineProperty(exports, "setClientRequestIdPolicy", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } })); +Object.defineProperty(exports, "setClientRequestIdPolicyName", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } })); +var logPolicy_js_1 = __nccwpck_require__(46821); +Object.defineProperty(exports, "logPolicy", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicy; } })); +Object.defineProperty(exports, "logPolicyName", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicyName; } })); +var multipartPolicy_js_1 = __nccwpck_require__(19042); +Object.defineProperty(exports, "multipartPolicy", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicy; } })); +Object.defineProperty(exports, "multipartPolicyName", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicyName; } })); +var proxyPolicy_js_1 = __nccwpck_require__(94761); +Object.defineProperty(exports, "proxyPolicy", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicy; } })); +Object.defineProperty(exports, "proxyPolicyName", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicyName; } })); +Object.defineProperty(exports, "getDefaultProxySettings", ({ enumerable: true, get: function () { return proxyPolicy_js_1.getDefaultProxySettings; } })); +var redirectPolicy_js_1 = __nccwpck_require__(98526); +Object.defineProperty(exports, "redirectPolicy", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicy; } })); +Object.defineProperty(exports, "redirectPolicyName", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicyName; } })); +var systemErrorRetryPolicy_js_1 = __nccwpck_require__(72470); +Object.defineProperty(exports, "systemErrorRetryPolicy", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } })); +Object.defineProperty(exports, "systemErrorRetryPolicyName", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } })); +var throttlingRetryPolicy_js_1 = __nccwpck_require__(54802); +Object.defineProperty(exports, "throttlingRetryPolicy", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } })); +Object.defineProperty(exports, "throttlingRetryPolicyName", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } })); +var retryPolicy_js_1 = __nccwpck_require__(39700); +Object.defineProperty(exports, "retryPolicy", ({ enumerable: true, get: function () { return retryPolicy_js_1.retryPolicy; } })); +var tracingPolicy_js_1 = __nccwpck_require__(80606); +Object.defineProperty(exports, "tracingPolicy", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicy; } })); +Object.defineProperty(exports, "tracingPolicyName", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicyName; } })); +var defaultRetryPolicy_js_1 = __nccwpck_require__(48549); +Object.defineProperty(exports, "defaultRetryPolicy", ({ enumerable: true, get: function () { return defaultRetryPolicy_js_1.defaultRetryPolicy; } })); +var userAgentPolicy_js_1 = __nccwpck_require__(88935); 
+Object.defineProperty(exports, "userAgentPolicy", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicy; } })); +Object.defineProperty(exports, "userAgentPolicyName", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicyName; } })); +var tlsPolicy_js_1 = __nccwpck_require__(88446); +Object.defineProperty(exports, "tlsPolicy", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicy; } })); +Object.defineProperty(exports, "tlsPolicyName", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicyName; } })); +var formDataPolicy_js_1 = __nccwpck_require__(16501); +Object.defineProperty(exports, "formDataPolicy", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicy; } })); +Object.defineProperty(exports, "formDataPolicyName", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicyName; } })); +var bearerTokenAuthenticationPolicy_js_1 = __nccwpck_require__(11319); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } })); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } })); +var ndJsonPolicy_js_1 = __nccwpck_require__(82032); +Object.defineProperty(exports, "ndJsonPolicy", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicy; } })); +Object.defineProperty(exports, "ndJsonPolicyName", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicyName; } })); +var auxiliaryAuthenticationHeaderPolicy_js_1 = __nccwpck_require__(68152); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } })); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } })); +var file_js_1 = __nccwpck_require__(3224); +Object.defineProperty(exports, "createFile", ({ enumerable: true, get: function () { return file_js_1.createFile; } })); +Object.defineProperty(exports, "createFileFromStream", ({ enumerable: true, get: function () { return file_js_1.createFileFromStream; } })); +//# sourceMappingURL=index.js.map - return asciiDomain; -} +/***/ }), -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } +/***/ 30648: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); - } - return output; -} +"use strict"; -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +exports.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); +//# sourceMappingURL=log.js.map - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } +/***/ }), - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } +/***/ 49463: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } +"use strict"; - return { - idx: maxIdx, - len: maxLen - }; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getBodyLength = getBodyLength; +exports.createNodeHttpClient = createNodeHttpClient; +const tslib_1 = __nccwpck_require__(4351); +const http = tslib_1.__importStar(__nccwpck_require__(88849)); +const https = tslib_1.__importStar(__nccwpck_require__(22286)); +const zlib = tslib_1.__importStar(__nccwpck_require__(65628)); +const node_stream_1 = __nccwpck_require__(84492); +const abort_controller_1 = __nccwpck_require__(11514); +const httpHeaders_js_1 = __nccwpck_require__(60118); +const restError_js_1 = __nccwpck_require__(61036); +const log_js_1 = __nccwpck_require__(30648); +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + if (stream.readable === false) { + return Promise.resolve(); + } + return new Promise((resolve) => { + const handler = () => { + resolve(); + stream.removeListener("close", handler); + stream.removeListener("end", handler); + stream.removeListener("error", handler); + }; + stream.on("close", handler); + stream.on("end", handler); + stream.on("error", handler); + }); } - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; } - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +class ReportTransform extends node_stream_1.Transform { + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new abort_controller_1.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. + if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new restError_js_1.RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : restError_js_1.RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + log_js_1.logger.error("Unrecognized body type", body); + reject(new restError_js_1.RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. 
+ keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } } - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); +function getResponseHeaders(res) { + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; } - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; } - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { + code: restError_js_1.RestError.PARSE_ERROR, + })); + } + }); + }); } - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +/** @internal */ +function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } } - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); +/** + * Create a new HttpClient instance for the NodeJS environment. + * @internal + */ +function createNodeHttpClient() { + return new NodeHttpClient(); } +//# sourceMappingURL=nodeHttpClient.js.map -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; +/***/ }), - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - path: [], - query: null, - fragment: null, +/***/ 83906: +/***/ ((__unused_webpack_module, exports) => { - cannotBeABaseURL: false - }; +"use strict"; - const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createEmptyPipeline = createEmptyPipeline; +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? _a : []; + this._orderedPolicies = undefined; } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } + getOrderedPolicies() { + if (!this._orderedPolicies) { + 
this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; + clone() { + return new HttpPipeline(this._policies); } - if (this.url.scheme === "file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; + static create() { + return new HttpPipeline(); } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. 
+ const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. + for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. + continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. 
+ walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. + */ +function createEmptyPipeline() { + return HttpPipeline.create(); +} +//# sourceMappingURL=pipeline.js.map -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } +/***/ }), - return true; -}; +/***/ 93536: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -URLStateMachine.prototype["parse relative"] = function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); +"use strict"; - this.state = "path"; - --this.pointer; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createPipelineRequest = createPipelineRequest; +const httpHeaders_js_1 = __nccwpck_require__(60118); +const core_util_1 = __nccwpck_require__(80637); +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : (0, httpHeaders_js_1.createHttpHeaders)(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? 
_c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || (0, core_util_1.randomUUID)(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. + * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +//# sourceMappingURL=pipelineRequest.js.map - return true; -}; +/***/ }), -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } +/***/ 68152: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return true; -}; +"use strict"; -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.auxiliaryAuthenticationHeaderPolicyName = void 0; +exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; +const tokenCycler_js_1 = __nccwpck_require__(50601); +const log_js_1 = __nccwpck_require__(30648); +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || log_js_1.logger; + const tokenCyclerMap = new WeakMap(); + return { + name: exports.auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map - return true; -}; +/***/ }), -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } +/***/ 11319: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return true; -}; +"use strict"; -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bearerTokenAuthenticationPolicyName = void 0; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.parseChallenges = parseChallenges; +const tokenCycler_js_1 = __nccwpck_require__(50601); +const log_js_1 = __nccwpck_require__(30648); +const restError_js_1 = __nccwpck_require__(61036); +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Try to send the given request. + * + * When a response is received, returns a tuple of the response received and, if the response was received + * inside a thrown RestError, the RestError that was thrown. + * + * Otherwise, if an error was thrown while sending the request that did not provide an underlying response, it + * will be rethrown. 
+ */ +async function trySendRequest(request, next) { + try { + return [await next(request), undefined]; } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if (this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } + catch (e) { + if ((0, restError_js_1.isRestError)(e) && e.response) { + return [e.response, e]; + } + else { + throw e; + } } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; +} +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + // Enable CAE true by default + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + enableCae: true, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function isChallengeResponse(response) { + return response.status === 401 && response.headers.has("WWW-Authenticate"); +} +/** + * Re-authorize the request for CAE challenge. + * The response containing the challenge is `options.response`. + * If this method returns true, the underlying request will be sent once again. + */ +async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { + var _a; + const { scopes } = onChallengeOptions; + const accessToken = await onChallengeOptions.getAccessToken(scopes, { + enableCae: true, + claims: caeClaims, + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `${(_a = accessToken.tokenType) !== null && _a !== void 0 ? _a : "Bearer"} ${accessToken.token}`); + return true; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || log_js_1.logger; + const callbacks = { + authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, + authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge, + }; + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. 
+ const getAccessToken = credential + ? (0, tokenCycler_js_1.createTokenCycler)(credential /* , options */) + : () => Promise.resolve(null); + return { + name: exports.bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. + * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + let shouldSendRequest; + [response, error] = await trySendRequest(request, next); + if (isChallengeResponse(response)) { + let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + // Handle CAE by default when receive CAE claim + if (claims) { + let parsedClaim; + // Return the response immediately if claims is not a valid base64 encoded string + try { + parsedClaim = atob(claims); + } + catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + response, + request, + getAccessToken, + logger, + }, parsedClaim); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + } + else if (callbacks.authorizeRequestOnChallenge) { + // Handle custom challenges when client provides custom callback + shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + // If we get another CAE Claim, we will handle it by default and return whatever value we receive for this + if (isChallengeResponse(response)) { + claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + if (claims) { + let parsedClaim; + try { + parsedClaim = atob(claims); + } + catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? 
scopes : [scopes], + response, + request, + getAccessToken, + logger, + }, parsedClaim); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + } + } + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +/** + * Converts: `Bearer a="b", c="d", Pop e="f", g="h"`. + * Into: `[ { scheme: 'Bearer', params: { a: 'b', c: 'd' } }, { scheme: 'Pop', params: { e: 'f', g: 'h' } } ]`. + * + * @internal + */ +function parseChallenges(challenges) { + // Challenge regex seperates the string to individual challenges with different schemes in the format `Scheme a="b", c=d` + // The challenge regex captures parameteres with either quotes values or unquoted values + const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; + // Parameter regex captures the claims group removed from the scheme in the format `a="b"` and `c="d"` + // CAE challenge always have quoted parameters. For more reference, https://learn.microsoft.com/entra/identity-platform/claims-challenge + const paramRegex = /(\w+)="([^"]*)"/g; + const parsedChallenges = []; + let match; + // Iterate over each challenge match + while ((match = challengeRegex.exec(challenges)) !== null) { + const scheme = match[1]; + const paramsString = match[2]; + const params = {}; + let paramMatch; + // Iterate over each parameter match + while ((paramMatch = paramRegex.exec(paramsString)) !== null) { + params[paramMatch[1]] = paramMatch[2]; + } + parsedChallenges.push({ scheme, params }); + } + return parsedChallenges; +} +/** + * Parse a pipeline response and look for a CAE challenge with "Bearer" scheme + * Return the value in the header without parsing the challenge + * @internal + */ +function getCaeChallengeClaims(challenges) { + var _a; + if (!challenges) { + return; } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } + // Find all challenges present in the header + const parsedChallenges = parseChallenges(challenges); + return (_a = parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")) === null || _a === void 0 ? void 0 : _a.params.claims; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map - return true; -}; +/***/ }), -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - return failure; - } +/***/ 57618: +/***/ ((__unused_webpack_module, exports) => { - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } +"use strict"; - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decompressResponsePolicyName = void 0; +exports.decompressResponsePolicy = decompressResponsePolicy; +/** + * The programmatic identifier of the decompressResponsePolicy. + */ +exports.decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +function decompressResponsePolicy() { + return { + name: exports.decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, + }; +} +//# sourceMappingURL=decompressResponsePolicy.js.map - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } +/***/ }), - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } +/***/ 48549: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return true; -}; +"use strict"; -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultRetryPolicyName = void 0; +exports.defaultRetryPolicy = defaultRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link defaultRetryPolicy} + */ +exports.defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + */ +function defaultRetryPolicy(options = {}) { + var _a; + return { + name: exports.defaultRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=defaultRetryPolicy.js.map - return true; -}; +/***/ }), -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); +/***/ 1598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; +"use strict"; - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.exponentialRetryPolicyName = void 0; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +exports.exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +function exponentialRetryPolicy(options = {}) { + var _a; + return (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }); +} +//# sourceMappingURL=exponentialRetryPolicy.js.map - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } +/***/ }), - return true; -}; +/***/ 16501: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formDataPolicyName = void 0; +exports.formDataPolicy = formDataPolicy; +const core_util_1 = __nccwpck_require__(80637); +const httpHeaders_js_1 = __nccwpck_require__(60118); +/** + * The programmatic identifier of the formDataPolicy. 
+ */ +exports.formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = this.base.host; - } + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. + */ +function formDataPolicy() { + return { + name: exports.formDataPolicyName, + async sendRequest(request, next) { + if (core_util_1.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } } - this.state = "path"; - --this.pointer; - } + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; + } + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: (0, httpHeaders_js_1.createHttpHeaders)({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: (0, core_util_1.stringToUint8Array)(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? 
here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } + } + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map - return true; -}; +/***/ }), -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; +/***/ 46821: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (this.stateOverride) { - return false; - } +"use strict"; - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logPolicyName = void 0; +exports.logPolicy = logPolicy; +const log_js_1 = __nccwpck_require__(30648); +const sanitizer_js_1 = __nccwpck_require__(34472); +/** + * The programmatic identifier of the logPolicy. + */ +exports.logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : log_js_1.logger.info; + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: exports.logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +//# sourceMappingURL=logPolicy.js.map - return true; -}; +/***/ }), -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; +/***/ 19042: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (c !== 47 && c !== 92) { - --this.pointer; +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.multipartPolicyName = void 0; +exports.multipartPolicy = multipartPolicy; +const core_util_1 = __nccwpck_require__(80637); +const concat_js_1 = __nccwpck_require__(80107); +const typeGuards_js_1 = __nccwpck_require__(58520); +function generateBoundary() { + return `----AzSDKFormBoundary${(0, core_util_1.randomUUID)()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - (!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; + else if ((0, typeGuards_js_1.isBlob)(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? undefined : source.size; } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); + else { + return undefined; } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + (0, core_util_1.stringToUint8Array)(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + (0, core_util_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + part.body, + (0, core_util_1.stringToUint8Array)(`\r\n--${boundary}`, "utf-8"), + ]), + (0, core_util_1.stringToUint8Array)("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); } - if (c === 63) { - this.url.query = ""; - this.state = "query"; + request.body = await (0, concat_js_1.concat)(sources); +} +/** + * Name of multipart policy + */ +exports.multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) 
{ + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); } - } else { - // TODO: If c is not a URL code point and not "%", parse error. +} +/** + * Pipeline policy for multipart requests + */ +function multipartPolicy() { + return { + name: exports.multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +//# sourceMappingURL=multipartPolicy.js.map - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } +/***/ }), - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } +/***/ 82032: +/***/ ((__unused_webpack_module, exports) => { - return true; -}; +"use strict"; -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ndJsonPolicyName = void 0; +exports.ndJsonPolicy = ndJsonPolicy; +/** + * The programmatic identifier of the ndJsonPolicy. + */ +exports.ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
+ */ +function ndJsonPolicy() { + return { + name: exports.ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +//# sourceMappingURL=ndJsonPolicy.js.map - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } +/***/ }), - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } +/***/ 94761: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return true; -}; +"use strict"; -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.globalNoProxyList = exports.proxyPolicyName = void 0; +exports.loadNoProxy = loadNoProxy; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.proxyPolicy = proxyPolicy; +const https_proxy_agent_1 = __nccwpck_require__(77219); +const http_proxy_agent_1 = __nccwpck_require__(4654); +const log_js_1 = __nccwpck_require__(30648); +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +exports.proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +exports.globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. 
+ * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * @deprecated - Internally this method is no longer necessary when setting proxy information. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; +} +/** + * This method attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + */ +function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? 
new URL(envProxy) : undefined; +} +function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); + } + catch (_a) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; +} +function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. + if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (request.tlsSettings) { + log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpProxyAgent; } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; + else { + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpsProxyAgent; } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + exports.globalNoProxyList.push(...loadNoProxy()); } - } - - if (url.query !== null) { - output += "?" + url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; + const defaultProxy = proxySettings + ? getUrlFromProxySettings(proxySettings) + : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: exports.proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + defaultProxy && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? 
undefined : globalBypassedMap)) { + setProxyAgentOnRequest(request, cachedAgents, defaultProxy); + } + else if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); + } + return next(request); + }, + }; } +//# sourceMappingURL=proxyPolicy.js.map -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); +/***/ }), - if (tuple.port !== null) { - result += ":" + tuple.port; - } +/***/ 98526: +/***/ ((__unused_webpack_module, exports) => { - return result; -} +"use strict"; -module.exports.serializeURL = serializeURL; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.redirectPolicyName = void 0; +exports.redirectPolicy = redirectPolicy; +/** + * The programmatic identifier of the redirectPolicy. + */ +exports.redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. + */ +function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: exports.redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; +/***/ }), -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } +/***/ 39700: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const usm = new URLStateMachine(input, 
options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } +"use strict"; - return usm.url; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retryPolicy = retryPolicy; +const helpers_js_1 = __nccwpck_require__(21333); +const logger_1 = __nccwpck_require__(89497); +const abort_controller_1 = __nccwpck_require__(11514); +const constants_js_1 = __nccwpck_require__(43171); +const retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new abort_controller_1.AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await (0, helpers_js_1.delay)(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. + } + }, + }; +} +//# sourceMappingURL=retryPolicy.js.map -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; +/***/ }), -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; +/***/ 93860: +/***/ ((__unused_webpack_module, exports) => { -module.exports.serializeHost = serializeHost; +"use strict"; -module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setClientRequestIdPolicyName = void 0; +exports.setClientRequestIdPolicy = setClientRequestIdPolicy; +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. 
+ */ +function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: exports.setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; +} +//# sourceMappingURL=setClientRequestIdPolicy.js.map -module.exports.serializeInteger = function (integer) { - return String(integer); -}; +/***/ }), -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } +/***/ 72470: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.systemErrorRetryPolicyName = void 0; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link systemErrorRetryPolicy} + */ +exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. + */ +function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: exports.systemErrorRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map /***/ }), -/***/ 3185: -/***/ ((module) => { +/***/ 54802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.throttlingRetryPolicyName = void 0; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link throttlingRetryPolicy} + */ +exports.throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. 
+ */ +function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: exports.throttlingRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=throttlingRetryPolicy.js.map -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } -}; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; -}; +/***/ }), -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; -}; +/***/ 88446: +/***/ ((__unused_webpack_module, exports) => { +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tlsPolicyName = void 0; +exports.tlsPolicy = tlsPolicy; +/** + * Name of the TLS Policy + */ +exports.tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. + */ +function tlsPolicy(tlsSettings) { + return { + name: exports.tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; +} +//# sourceMappingURL=tlsPolicy.js.map /***/ }), -/***/ 2940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) +/***/ 80606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return wrapper +"use strict"; - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tracingPolicyName = void 0; +exports.tracingPolicy = tracingPolicy; +const core_tracing_1 = __nccwpck_require__(19363); +const constants_js_1 = __nccwpck_require__(43171); +const userAgent_js_1 = __nccwpck_require__(96158); +const log_js_1 = __nccwpck_require__(30648); +const core_util_1 = __nccwpck_require__(80637); +const restError_js_1 = __nccwpck_require__(61036); +const sanitizer_js_1 = __nccwpck_require__(34472); +/** + * The programmatic identifier of the tracingPolicy. + */ +exports.tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. 
+ * @param options - Options to configure the telemetry logged by the tracing policy. + */ +function tracingPolicy(options = {}) { + const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + const tracingClient = tryCreateTracingClient(); + return { + name: exports.tracingPolicyName, + async sendRequest(request, next) { + var _a; + if (!tracingClient) { + return next(request); + } + const userAgent = await userAgentPromise; + const spanAttributes = { + "http.url": sanitizer.sanitizeUrl(request.url), + "http.method": request.method, + "http.user_agent": userAgent, + requestId: request.requestId, + }; + if (userAgent) { + spanAttributes["http.user_agent"] = userAgent; + } + const { span, tracingContext } = (_a = tryCreateSpan(tracingClient, request, spanAttributes)) !== null && _a !== void 0 ? _a : {}; + if (!span || !tracingContext) { + return next(request); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; + } + catch (err) { + tryProcessError(span, err); + throw err; + } + }, + }; +} +function tryCreateTracingClient() { + try { + return (0, core_tracing_1.createTracingClient)({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: constants_js_1.SDK_VERSION, + }); } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) + catch (e) { + log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; } - return ret - } } - +function tryCreateSpan(tracingClient, request, spanAttributes) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } + catch (e) { + log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; + } +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: (0, core_util_1.isError)(error) ? 
error : undefined, + }); + if ((0, restError_js_1.isRestError)(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); + } + span.end(); + } + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + } +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + } +} +//# sourceMappingURL=tracingPolicy.js.map /***/ }), -/***/ 5185: -/***/ ((module) => { - -class Node { - /// value; - /// next; +/***/ 88935: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - constructor(value) { - this.value = value; +"use strict"; - // TODO: Remove this when targeting Node.js 12. - this.next = undefined; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.userAgentPolicyName = void 0; +exports.userAgentPolicy = userAgentPolicy; +const userAgent_js_1 = __nccwpck_require__(96158); +const UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +exports.userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. + */ +function userAgentPolicy(options = {}) { + const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + return { + name: exports.userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, await userAgentValue); + } + return next(request); + }, + }; } +//# sourceMappingURL=userAgentPolicy.js.map -class Queue { - // TODO: Use private class fields when targeting Node.js 12. - // #_head; - // #_tail; - // #_size; - - constructor() { - this.clear(); - } - - enqueue(value) { - const node = new Node(value); - - if (this._head) { - this._tail.next = node; - this._tail = node; - } else { - this._head = node; - this._tail = node; - } +/***/ }), - this._size++; - } +/***/ 61036: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - dequeue() { - const current = this._head; - if (!current) { - return; - } +"use strict"; - this._head = this._head.next; - this._size--; - return current.value; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RestError = void 0; +exports.isRestError = isRestError; +const core_util_1 = __nccwpck_require__(80637); +const inspect_js_1 = __nccwpck_require__(33106); +const sanitizer_js_1 = __nccwpck_require__(34472); +const errorSanitizer = new sanitizer_js_1.Sanitizer(); +/** + * A custom error type for failed pipeline requests. 
+ */ +class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + // The request and response may contain sensitive information in the headers or body. + // To help prevent this sensitive information being accidentally logged, the request and response + // properties are marked as non-enumerable here. This prevents them showing up in the output of + // JSON.stringify and console.log. + Object.defineProperty(this, "request", { value: options.request, enumerable: false }); + Object.defineProperty(this, "response", { value: options.response, enumerable: false }); + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [inspect_js_1.custom]() { + // Extract non-enumerable properties and add them back. This is OK since in this output the request and + // response get sanitized. + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(Object.assign(Object.assign({}, this), { request: this.request, response: this.response }))}`; + } +} +exports.RestError = RestError; +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. + */ +function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return (0, core_util_1.isError)(e) && e.name === "RestError"; +} +//# sourceMappingURL=restError.js.map - clear() { - this._head = undefined; - this._tail = undefined; - this._size = 0; - } +/***/ }), - get size() { - return this._size; - } +/***/ 843: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - * [Symbol.iterator]() { - let current = this._head; +"use strict"; - while (current) { - yield current.value; - current = current.next; - } - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.exponentialRetryStrategy = exponentialRetryStrategy; +exports.isExponentialRetryResponse = isExponentialRetryResponse; +exports.isSystemError = isSystemError; +const core_util_1 = __nccwpck_require__(80637); +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? 
_b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + return (0, core_util_1.calculateRetryDelay)(retryCount, { + retryDelayInMs: retryInterval, + maxRetryDelayInMs: maxRetryInterval, + }); + }, + }; } - -module.exports = Queue; - +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. + */ +function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} +//# sourceMappingURL=exponentialRetryStrategy.js.map /***/ }), -/***/ 4553: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 66645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.run = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const cache = __importStar(__nccwpck_require__(7799)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in -// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to -// throw an uncaught exception. Instead of failing this action, just warn. -process.on('uncaughtException', e => { - const warningPrefix = '[warning]'; - core.info(`${warningPrefix}${e.message}`); -}); -// Added early exit to resolve issue with slow post action step: -function run(earlyExit) { - return __awaiter(this, void 0, void 0, function* () { - try { - const cacheLock = core.getState(constants_1.State.CachePackageManager); - if (cacheLock) { - yield cachePackages(cacheLock); - if (earlyExit) { - process.exit(0); - } - } - else { - core.debug(`Caching for '${cacheLock}' is not supported`); - } - } - catch (error) { - core.setFailed(error.message); - } - }); -} -exports.run = run; -const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const state = core.getState(constants_1.State.CacheMatchedKey); - const primaryKey = core.getState(constants_1.State.CachePrimaryKey); - const cachePaths = JSON.parse(core.getState(constants_1.State.CachePaths) || '[]'); - const packageManagerInfo = yield (0, cache_utils_1.getPackageManagerInfo)(packageManager); - if (!packageManagerInfo) { - core.debug(`Caching for '${packageManager}' is not supported`); - return; - } - if (!cachePaths.length) { - // TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?) - // export declare function getInput(name: string, options?: InputOptions): string; - const cacheDependencyPath = core.getInput('cache-dependency-path') || ''; - throw new Error(`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`); - } - if (primaryKey === state) { - core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; +exports.isThrottlingRetryResponse = isThrottlingRetryResponse; +exports.throttlingRetryStrategy = throttlingRetryStrategy; +const helpers_js_1 = __nccwpck_require__(21333); +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). + */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. + * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. 
+ * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; } - const cacheId = yield cache.saveCache(cachePaths, primaryKey); - if (cacheId == '') { - return; + catch (_a) { + return undefined; } - core.info(`Cache saved with the key: ${primaryKey}`); -}); -run(true); - +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + */ +function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +//# sourceMappingURL=throttlingRetryStrategy.js.map /***/ }), -/***/ 1678: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 80107: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -const glob = __importStar(__nccwpck_require__(8090)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const util_1 = __nccwpck_require__(2629); -exports.supportedPackageManagers = { - npm: { - name: 'npm', - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('npm config get cache', 'Could not get npm cache folder path') - }, - pnpm: { - name: 'pnpm', - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('pnpm store path --silent', 'Could not get pnpm cache folder path') - }, - yarn: { - name: 'yarn', - lockFilePatterns: ['yarn.lock'], - getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { - const yarnVersion = yield (0, exports.getCommandOutputNotEmpty)(`yarn --version`, 'Could not retrieve version of yarn', projectDir); - core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); - const stdOut = yarnVersion.startsWith('1.') - ? 
yield (0, exports.getCommandOutput)('yarn cache dir', projectDir) - : yield (0, exports.getCommandOutput)('yarn config get cacheFolder', projectDir); - if (!stdOut) { - throw new Error(`Could not get yarn cache folder path for ${projectDir}`); +exports.concat = concat; +const tslib_1 = __nccwpck_require__(4351); +const node_stream_1 = __nccwpck_require__(84492); +const typeGuards_js_1 = __nccwpck_require__(58520); +const file_js_1 = __nccwpck_require__(3224); +function streamAsyncIterator() { + return tslib_1.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield tslib_1.__await(reader.read()); + if (done) { + return yield tslib_1.__await(void 0); + } + yield yield tslib_1.__await(value); } - return stdOut; - }) + } + finally { + reader.releaseLock(); + } + }); +} +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); } -}; -const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); - if (exitCode) { - stderr = !stderr.trim() - ? `The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); } - return stdout.trim(); -}); -exports.getCommandOutput = getCommandOutput; -const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = (0, exports.getCommandOutput)(toolCommand, cwd); - if (!stdOut) { - throw new Error(error); +} +function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return node_stream_1.Readable.fromWeb(stream); } - return stdOut; -}); -exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; -const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; + else { + return stream; } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; +} +function toStream(source) { + if (source instanceof Uint8Array) { + return node_stream_1.Readable.from(Buffer.from(source)); } - else if (packageManager === 'yarn') { - return exports.supportedPackageManagers.yarn; + else if ((0, typeGuards_js_1.isBlob)(source)) { + return toStream((0, file_js_1.getRawContent)(source)); } else { - return null; + return ensureNodeStream(source); } -}); -exports.getPackageManagerInfo = getPackageManagerInfo; +} /** - * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` - * - first through `getCacheDirectories` - * - second from `repoHasYarn3ManagedCache` + * Utility function that concatenates a set of binary inputs into one combined output. * - * it contains expensive IO operation and thus should be memoized + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. 
+ * + * @internal */ -let projectDirectoriesMemoized = null; +async function concat(sources) { + return function () { + const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream); + return node_stream_1.Readable.from((function () { + return tslib_1.__asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of streams) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib_1.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield tslib_1.__await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield tslib_1.__await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); + }; +} +//# sourceMappingURL=concat.js.map + +/***/ }), + +/***/ 3224: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRawContent = getRawContent; +exports.createFileFromStream = createFileFromStream; +exports.createFile = createFile; +const core_util_1 = __nccwpck_require__(80637); +const typeGuards_js_1 = __nccwpck_require__(58520); +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; /** - * unit test must reset memoized variables + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal */ -const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); -exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} /** - * Expands (converts) the string input `cache-dependency-path` to list of directories that - * may be project roots - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of directories and possible + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. 
+ * + * @internal */ -const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - if (projectDirectoriesMemoized !== null) { - return projectDirectoriesMemoized; +function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); } - const globber = yield glob.create(cacheDependencyPath); - const cacheDependenciesPaths = yield globber.glob(); - const existingDirectories = cacheDependenciesPaths - .map(path_1.default.dirname) - .filter((0, util_1.unique)()) - .map(dirName => fs_1.default.realpathSync(dirName)) - .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); - if (!existingDirectories.length) - core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); - projectDirectoriesMemoized = existingDirectories; - return existingDirectories; -}); -/** - * Finds the cache directories configured for the repo if cache-dependency-path is not empty - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of files on which the cache depends - */ -const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); - const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { - const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); - core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); - return cacheFolderPath; - }))); - // uniq in order to do not cache the same directories twice - return cacheFoldersPaths.filter((0, util_1.unique)()); -}); -/** - * Finds the cache directories configured for the repo ignoring cache-dependency-path - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @return list of files on which the cache depends - */ -const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { - const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); - core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); - return [cacheFolderPath]; -}); + else { + return blob.stream(); + } +} /** - * A function to find the cache directories configured for the repo - * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of files on which the cache depends + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. 
+ * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. + * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ -const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project - // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 - if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { - return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); - } - return getCacheDirectoriesForRootProject(packageManagerInfo); -}); -exports.getCacheDirectories = getCacheDirectories; +function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { + const s = stream(); + if ((0, typeGuards_js_1.isNodeReadableStream)(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} /** - * A function to check if the directory is a yarn project configured to manage - * obsolete dependencies in the local cache - * @param directory - a path to the folder - * @return - true if the directory's project is yarn managed - * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false - * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false - * - if local cache is not explicitly enabled (not yarn3), return false - * - return true otherwise + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. 
*/ -const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { - const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; - core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); - // if .yarn/cache directory exists the cache is managed by version control system - const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); - if (fs_1.default.existsSync(yarnCacheFile) && - fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { - core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); - return Promise.resolve(false); - } - // NOTE: yarn1 returns 'undefined' with return code = 0 - const enableGlobalCache = yield (0, exports.getCommandOutput)('yarn config get enableGlobalCache', workDir); - // only local cache is not managed by yarn - const managed = enableGlobalCache.includes('false'); - if (managed) { - core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); - return true; +function createFile(content, name, options = {}) { + var _a, _b, _c; + if (core_util_1.isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); } else { - core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`); - return false; + return new File([content], name, options); } -}); +} +//# sourceMappingURL=file.js.map + +/***/ }), + +/***/ 21333: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.delay = delay; +exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber; +const abort_controller_1 = __nccwpck_require__(11514); +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new abort_controller_1.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} /** - * A function to report the repo contains Yarn managed projects - * @param packageManagerInfo - used to make sure current package manager is yarn - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return - true if all project directories configured to be Yarn managed + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. */ -const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManagerInfo.name !== 'yarn') - return false; - const yarnDirs = cacheDependencyPath - ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) - : ['']; - const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); - return isManagedList.every(Boolean); -}); -exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (cache.isFeatureAvailable()) - return true; - if (isGhes()) { - core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - return false; - } - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - return false; +function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; - +//# sourceMappingURL=helpers.js.map /***/ }), -/***/ 9042: -/***/ ((__unused_webpack_module, exports) => { +/***/ 33106: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Outputs = exports.State = exports.LockType = void 0; -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType || (exports.LockType = LockType = {})); -var State; -(function (State) { - State["CachePackageManager"] = "SETUP_NODE_CACHE_PACKAGE_MANAGER"; - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; - State["CachePaths"] = "CACHE_PATHS"; -})(State || (exports.State = State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs || (exports.Outputs = Outputs = {})); - +exports.custom = void 0; +const node_util_1 = __nccwpck_require__(47261); +exports.custom = node_util_1.inspect.custom; +//# sourceMappingURL=inspect.js.map /***/ }), -/***/ 2629: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 34472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.unique = exports.printEnvDetailsAndSetOutput = exports.getNodeVersionFromFile = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const io = __importStar(__nccwpck_require__(7436)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -function getNodeVersionFromFile(versionFilePath) { - var _a, _b, _c, _d, _e; - if (!fs_1.default.existsSync(versionFilePath)) { - throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); +exports.Sanitizer = void 0; +const core_util_1 = __nccwpck_require__(80637); +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); } - const contents = fs_1.default.readFileSync(versionFilePath, 'utf8'); - // Try parsing the file as an NPM `package.json` file. - try { - const manifest = JSON.parse(contents); - // Presume package.json file. - if (typeof manifest === 'object' && !!manifest) { - // Support Volta. - // See https://docs.volta.sh/guide/understanding#managing-your-project - if ((_a = manifest.volta) === null || _a === void 0 ? void 0 : _a.node) { - return manifest.volta.node; + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); } - if ((_b = manifest.engines) === null || _b === void 0 ? void 0 : _b.node) { - return manifest.engines.node; + if (key === "headers") { + return this.sanitizeHeaders(value); } - // Support Volta workspaces. - // See https://docs.volta.sh/advanced/workspaces - if ((_c = manifest.volta) === null || _c === void 0 ? 
void 0 : _c.extends) { - const extendedFilePath = path_1.default.resolve(path_1.default.dirname(versionFilePath), manifest.volta.extends); - core.info('Resolving node version from ' + extendedFilePath); - return getNodeVersionFromFile(extendedFilePath); + else if (key === "url") { + return this.sanitizeUrl(value); } - // If contents are an object, we parsed JSON - // this can happen if node-version-file is a package.json - // yet contains no volta.node or engines.node - // - // If node-version file is _not_ JSON, control flow - // will not have reached these lines. - // - // And because we've reached here, we know the contents - // *are* JSON, so no further string parsing makes sense. - return null; + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || (0, core_util_1.isObject)(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null || value === "") { + return value; } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); } - catch (_f) { - core.info('Node version file is not JSON file'); + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; } - const found = contents.match(/^(?:node(js)?\s+)?v?(?[^\s]+)$/m); - return (_e = (_d = found === null || found === void 0 ? void 0 : found.groups) === null || _d === void 0 ? void 0 : _d.version) !== null && _e !== void 0 ? _e : contents.trim(); } -exports.getNodeVersionFromFile = getNodeVersionFromFile; -function printEnvDetailsAndSetOutput() { - return __awaiter(this, void 0, void 0, function* () { - core.startGroup('Environment details'); - const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { - const pathTool = yield io.which(tool, false); - const output = pathTool ? yield getToolVersion(tool, ['--version']) : ''; - return { tool, output }; - })); - const tools = yield Promise.all(promises); - tools.forEach(({ tool, output }) => { - if (tool === 'node') { - core.setOutput(`${tool}-version`, output); +exports.Sanitizer = Sanitizer; +//# sourceMappingURL=sanitizer.js.map + +/***/ }), + +/***/ 50601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_CYCLER_OPTIONS = void 0; +exports.createTokenCycler = createTokenCycler; +const helpers_js_1 = __nccwpck_require__(21333); +// Default options for the cycler if none are provided +exports.DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); } - core.info(`${tool}: ${output}`); - }); - core.endGroup(); - }); + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await (0, helpers_js_1.delay)(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; } -exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; -function getToolVersion(tool, options) { - return __awaiter(this, void 0, void 0, function* () { - try { - const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { - ignoreReturnCode: true, - silent: true - }); - if (exitCode > 0) { - core.info(`[warning]${stderr}`); - return ''; +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. 
+ */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + if (cycler.isRefreshing) { + return false; } - return stdout.trim(); + if ((token === null || token === void 0 ? void 0 : token.refreshAfterTimestamp) && token.refreshAfterTimestamp < Date.now()) { + return true; + } + return ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now(); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); } - catch (err) { - return ''; + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + const hasClaimChallenge = Boolean(tokenOptions.claims); + const tenantIdChanged = tenantId !== tokenOptions.tenantId; + if (hasClaimChallenge) { + // If we've received a claim, we know the existing token isn't valid + // We want to clear it so that that refresh worker won't use the old expiration time as a timeout + token = null; + } + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. 
+ const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; + if (mustRefresh) { + return refresh(scopes, tokenOptions); + } + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); } - }); -} -const unique = () => { - const encountered = new Set(); - return (value) => { - if (encountered.has(value)) - return false; - encountered.add(value); - return true; + return token; }; -}; -exports.unique = unique; - +} +//# sourceMappingURL=tokenCycler.js.map /***/ }), -/***/ 2877: -/***/ ((module) => { +/***/ 58520: +/***/ ((__unused_webpack_module, exports) => { -module.exports = eval("require")("encoding"); +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isNodeReadableStream = isNodeReadableStream; +exports.isWebReadableStream = isWebReadableStream; +exports.isReadableStream = isReadableStream; +exports.isBlob = isBlob; +function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +function isBlob(x) { + return typeof x.stream === "function"; +} +//# sourceMappingURL=typeGuards.js.map /***/ }), -/***/ 9491: -/***/ ((module) => { +/***/ 96158: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("assert"); - -/***/ }), - -/***/ 852: -/***/ ((module) => { -"use strict"; -module.exports = require("async_hooks"); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentHeaderName = getUserAgentHeaderName; +exports.getUserAgentValue = getUserAgentValue; +const userAgentPlatform_js_1 = __nccwpck_require__(15316); +const constants_js_1 = __nccwpck_require__(43171); +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? `${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +function getUserAgentHeaderName() { + return (0, userAgentPlatform_js_1.getHeaderName)(); +} +/** + * @internal + */ +async function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); + await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +//# sourceMappingURL=userAgent.js.map /***/ }), -/***/ 4300: -/***/ ((module) => { +/***/ 15316: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("buffer"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHeaderName = getHeaderName; +exports.setPlatformSpecificData = setPlatformSpecificData; +const tslib_1 = __nccwpck_require__(4351); +const os = tslib_1.__importStar(__nccwpck_require__(70612)); +const process = tslib_1.__importStar(__nccwpck_require__(97742)); +/** + * @internal + */ +function getHeaderName() { + return "User-Agent"; +} +/** + * @internal + */ +async function setPlatformSpecificData(map) { + if (process && process.versions) { + const versions = process.versions; + if (versions.bun) { + map.set("Bun", versions.bun); + } + else if (versions.deno) { + map.set("Deno", versions.deno); + } + else if (versions.node) { + map.set("Node", versions.node); + } + } + map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`); +} +//# sourceMappingURL=userAgentPlatform.js.map /***/ }), -/***/ 2081: -/***/ ((module) => { +/***/ 59390: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("child_process"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 6206: -/***/ ((module) => { +/***/ 11514: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("console"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(59390); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6113: -/***/ ((module) => { +/***/ 19363: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("crypto"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTracingClient = exports.useInstrumenter = void 0; +var instrumenter_js_1 = __nccwpck_require__(63418); +Object.defineProperty(exports, "useInstrumenter", ({ enumerable: true, get: function () { return instrumenter_js_1.useInstrumenter; } })); +var tracingClient_js_1 = __nccwpck_require__(69254); +Object.defineProperty(exports, "createTracingClient", ({ enumerable: true, get: function () { return tracingClient_js_1.createTracingClient; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 7643: -/***/ ((module) => { +/***/ 63418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("diagnostics_channel"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultTracingSpan = createDefaultTracingSpan; +exports.createDefaultInstrumenter = createDefaultInstrumenter; +exports.useInstrumenter = useInstrumenter; +exports.getInstrumenter = getInstrumenter; +const tracingContext_js_1 = __nccwpck_require__(18110); +const state_js_1 = __nccwpck_require__(81241); +function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + addEvent: () => { + // noop + }, + }; +} +function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +function useInstrumenter(instrumenter) { + state_js_1.state.instrumenterImplementation = instrumenter; +} +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. + * + * @returns The currently set instrumenter + */ +function getInstrumenter() { + if (!state_js_1.state.instrumenterImplementation) { + state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state_js_1.state.instrumenterImplementation; +} +//# sourceMappingURL=instrumenter.js.map /***/ }), -/***/ 2361: -/***/ ((module) => { +/***/ 81241: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("events"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.state = void 0; +/** + * @internal + * + * Holds the singleton instrumenter, to be shared across CJS and ESM imports. + */ +exports.state = { + instrumenterImplementation: undefined, +}; +//# sourceMappingURL=state-cjs.cjs.map /***/ }), -/***/ 7147: -/***/ ((module) => { +/***/ 69254: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("fs"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTracingClient = createTracingClient; +const instrumenter_js_1 = __nccwpck_require__(63418); +const tracingContext_js_1 = __nccwpck_require__(18110); +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. + */ +function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? 
void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. + */ + function parseTraceparentHeader(traceparentHeader) { + return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. + */ + function createRequestHeaders(tracingContext) { + return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +//# sourceMappingURL=tracingClient.js.map /***/ }), -/***/ 3685: -/***/ ((module) => { +/***/ 18110: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("http"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TracingContextImpl = exports.knownContextKeys = void 0; +exports.createTracingContext = createTracingContext; +/** @internal */ +exports.knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(exports.knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(exports.knownContextKeys.namespace, options.namespace); + } + return context; +} +/** @internal */ +class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? 
new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +exports.TracingContextImpl = TracingContextImpl; +//# sourceMappingURL=tracingContext.js.map /***/ }), -/***/ 5158: -/***/ ((module) => { +/***/ 87205: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("http2"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cancelablePromiseRace = cancelablePromiseRace; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); + } +} +//# sourceMappingURL=aborterUtils.js.map /***/ }), -/***/ 5687: -/***/ ((module) => { +/***/ 9972: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("https"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint8ArrayToString = uint8ArrayToString; +exports.stringToUint8Array = stringToUint8Array; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. + * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} +//# sourceMappingURL=bytesEncoding.js.map /***/ }), -/***/ 1808: -/***/ ((module) => { +/***/ 97980: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("net"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +var _a, _b, _c, _d; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isReactNative = exports.isNodeRuntime = exports.isNode = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; +/** + * A constant that indicates whether the environment the code is running is a Web Browser. 
+ */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +exports.isWebWorker = typeof self === "object" && + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +exports.isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + */ +exports.isNodeLike = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. + */ +exports.isNode = exports.isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno; +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map /***/ }), -/***/ 5673: -/***/ ((module) => { +/***/ 12376: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:events"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createAbortablePromise = createAbortablePromise; +const abort_controller_1 = __nccwpck_require__(54812); +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new abort_controller_1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); + } + function removeListeners() { + abortSignal === null || abortSignal === void 0 ? 
void 0 : abortSignal.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); + }); +} +//# sourceMappingURL=createAbortablePromise.js.map /***/ }), -/***/ 4492: -/***/ ((module) => { +/***/ 19259: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:stream"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.delay = delay; +exports.calculateRetryDelay = calculateRetryDelay; +const createAbortablePromise_js_1 = __nccwpck_require__(12376); +const random_js_1 = __nccwpck_require__(93710); +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, + }); +} +/** + * Calculates the delay interval for retry attempts using exponential delay with jitter. + * @param retryAttempt - The current retry attempt number. + * @param config - The exponential retry configuration. + * @returns An object containing the calculated retry delay. + */ +function calculateRetryDelay(retryAttempt, config) { + // Exponentially increase the delay each time + const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); + // Don't let the delay exceed the maximum + const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); + return { retryAfterInMs }; +} +//# sourceMappingURL=delay.js.map /***/ }), -/***/ 7261: -/***/ ((module) => { +/***/ 46734: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:util"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isError = isError; +exports.getErrorMessage = getErrorMessage; +const object_js_1 = __nccwpck_require__(56538); +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. 
+ */ +function isError(e) { + if ((0, object_js_1.isObject)(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +//# sourceMappingURL=error.js.map /***/ }), -/***/ 2037: -/***/ ((module) => { +/***/ 80637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("os"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.calculateRetryDelay = exports.delay = void 0; +var delay_js_1 = __nccwpck_require__(19259); +Object.defineProperty(exports, "delay", ({ enumerable: true, get: function () { return delay_js_1.delay; } })); +Object.defineProperty(exports, "calculateRetryDelay", ({ enumerable: true, get: function () { return delay_js_1.calculateRetryDelay; } })); +var aborterUtils_js_1 = __nccwpck_require__(87205); +Object.defineProperty(exports, "cancelablePromiseRace", ({ enumerable: true, get: function () { return aborterUtils_js_1.cancelablePromiseRace; } })); +var createAbortablePromise_js_1 = __nccwpck_require__(12376); +Object.defineProperty(exports, "createAbortablePromise", ({ enumerable: true, get: function () { return createAbortablePromise_js_1.createAbortablePromise; } })); +var random_js_1 = __nccwpck_require__(93710); +Object.defineProperty(exports, "getRandomIntegerInclusive", ({ enumerable: true, get: function () { return random_js_1.getRandomIntegerInclusive; } })); +var object_js_1 = __nccwpck_require__(56538); +Object.defineProperty(exports, "isObject", ({ enumerable: true, get: function () { return object_js_1.isObject; } })); +var error_js_1 = __nccwpck_require__(46734); +Object.defineProperty(exports, "isError", ({ enumerable: true, get: function () { return error_js_1.isError; } })); +Object.defineProperty(exports, "getErrorMessage", ({ enumerable: true, get: function () { return error_js_1.getErrorMessage; } })); +var sha256_js_1 = __nccwpck_require__(94793); +Object.defineProperty(exports, "computeSha256Hash", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hash; } })); +Object.defineProperty(exports, "computeSha256Hmac", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hmac; } })); +var typeGuards_js_1 = __nccwpck_require__(1187); 
+Object.defineProperty(exports, "isDefined", ({ enumerable: true, get: function () { return typeGuards_js_1.isDefined; } })); +Object.defineProperty(exports, "isObjectWithProperties", ({ enumerable: true, get: function () { return typeGuards_js_1.isObjectWithProperties; } })); +Object.defineProperty(exports, "objectHasProperty", ({ enumerable: true, get: function () { return typeGuards_js_1.objectHasProperty; } })); +var uuidUtils_js_1 = __nccwpck_require__(17658); +Object.defineProperty(exports, "randomUUID", ({ enumerable: true, get: function () { return uuidUtils_js_1.randomUUID; } })); +var checkEnvironment_js_1 = __nccwpck_require__(97980); +Object.defineProperty(exports, "isBrowser", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } })); +Object.defineProperty(exports, "isBun", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } })); +Object.defineProperty(exports, "isNode", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } })); +Object.defineProperty(exports, "isNodeLike", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeLike; } })); +Object.defineProperty(exports, "isNodeRuntime", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeRuntime; } })); +Object.defineProperty(exports, "isDeno", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } })); +Object.defineProperty(exports, "isReactNative", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } })); +Object.defineProperty(exports, "isWebWorker", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isWebWorker; } })); +var bytesEncoding_js_1 = __nccwpck_require__(9972); +Object.defineProperty(exports, "uint8ArrayToString", ({ enumerable: true, get: function () { return bytesEncoding_js_1.uint8ArrayToString; } })); +Object.defineProperty(exports, "stringToUint8Array", ({ enumerable: true, get: function () { return bytesEncoding_js_1.stringToUint8Array; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 1017: -/***/ ((module) => { +/***/ 56538: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("path"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isObject = isObject; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=object.js.map /***/ }), -/***/ 4074: -/***/ ((module) => { +/***/ 93710: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("perf_hooks"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRandomIntegerInclusive = getRandomIntegerInclusive; +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. 
+ */ +function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +//# sourceMappingURL=random.js.map /***/ }), -/***/ 5477: -/***/ ((module) => { +/***/ 94793: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("punycode"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.computeSha256Hmac = computeSha256Hmac; +exports.computeSha256Hash = computeSha256Hash; +const crypto_1 = __nccwpck_require__(6113); +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. + * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return (0, crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); +} +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +async function computeSha256Hash(content, encoding) { + return (0, crypto_1.createHash)("sha256").update(content).digest(encoding); +} +//# sourceMappingURL=sha256.js.map /***/ }), -/***/ 3477: -/***/ ((module) => { +/***/ 1187: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("querystring"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDefined = isDefined; +exports.isObjectWithProperties = isObjectWithProperties; +exports.objectHasProperty = objectHasProperty; +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +//# sourceMappingURL=typeGuards.js.map /***/ }), -/***/ 2781: -/***/ ((module) => { +/***/ 17658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("stream"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomUUID = randomUUID; +const crypto_1 = __nccwpck_require__(6113); +// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. +const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" + ? globalThis.crypto.randomUUID.bind(globalThis.crypto) + : crypto_1.randomUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +function randomUUID() { + return uuidFunction(); +} +//# sourceMappingURL=uuidUtils.js.map /***/ }), -/***/ 5356: -/***/ ((module) => { +/***/ 42118: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("stream/web"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 1576: -/***/ ((module) => { +/***/ 54812: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("string_decoder"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(42118); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 9512: -/***/ ((module) => { +/***/ 17309: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("timers"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0; +var xml_js_1 = __nccwpck_require__(39170); +Object.defineProperty(exports, "stringifyXML", ({ enumerable: true, get: function () { return xml_js_1.stringifyXML; } })); +Object.defineProperty(exports, "parseXML", ({ enumerable: true, get: function () { return xml_js_1.parseXML; } })); +var xml_common_js_1 = __nccwpck_require__(62060); +Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_ATTRKEY; } })); +Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_CHARKEY; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 4404: -/***/ ((module) => { +/***/ 62060: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("tls"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map /***/ }), -/***/ 6224: -/***/ ((module) => { +/***/ 39170: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("tty"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringifyXML = stringifyXML; +exports.parseXML = parseXML; +const fast_xml_parser_1 = __nccwpck_require__(12603); +const xml_common_js_1 = __nccwpck_require__(62060); +function getCommonOptions(options) { + var _a; + return { + attributesGroupName: xml_common_js_1.XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : xml_common_js_1.XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false, + }; +} +function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" }); +} +function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); +} +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the XML building of given JSON object + * `rootName` indicates the name of the root element in the resulting XML + */ +function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + * `includeRoot` indicates whether the root element is to be included or not in the output + */ +async function parseXML(str, opts = {}) { + if (!str) { + throw new Error("Document is empty"); + } + const validation = fast_xml_parser_1.XMLValidator.validate(str); + if (validation !== true) { + throw validation; + } + const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str); + // Remove the node. + // This is a change in behavior on fxp v4. Issue #424 + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? Object.assign({}, value) : value; + } + } + return parsedXml; +} +//# sourceMappingURL=xml.js.map /***/ }), -/***/ 7310: -/***/ ((module) => { +/***/ 80162: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("url"); + +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +const log_js_1 = __nccwpck_require__(18898); +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log: log_js_1.log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +exports["default"] = debugObj; +//# sourceMappingURL=debug.js.map /***/ }), -/***/ 3837: -/***/ ((module) => { +/***/ 89497: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("util"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureLogger = void 0; +exports.setLogLevel = setLogLevel; +exports.getLogLevel = getLogLevel; +exports.createClientLogger = createClientLogger; +const tslib_1 = __nccwpck_require__(4351); +const debug_js_1 = tslib_1.__importDefault(__nccwpck_require__(80162)); +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. 
+ */ +exports.AzureLogger = (0, debug_js_1.default)("azure"); +exports.AzureLogger.log = (...args) => { + debug_js_1.default.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug_js_1.default.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +function createClientLogger(namespace) { + const clientRootLogger = exports.AzureLogger.extend(namespace); + patchLogMethod(exports.AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug_js_1.default.disable(); + debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map /***/ }), -/***/ 9830: -/***/ ((module) => { +/***/ 18898: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("util/types"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.log = log; +const tslib_1 = __nccwpck_require__(4351); +const node_os_1 = __nccwpck_require__(70612); +const node_util_1 = tslib_1.__importDefault(__nccwpck_require__(47261)); +const process = tslib_1.__importStar(__nccwpck_require__(97742)); +function log(message, ...args) { + process.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); +} +//# sourceMappingURL=log.js.map /***/ }), -/***/ 1267: -/***/ ((module) => { +/***/ 58412: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("worker_threads"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 9796: -/***/ ((module) => { +/***/ 1753: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("zlib"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(58412); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 2989: +/***/ 22989: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -68249,7 +107666,7 @@ module.exports = require("zlib"); // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AclRoleAccessorMethods = exports.Acl = void 0; -const promisify_1 = __nccwpck_require__(9203); +const promisify_1 = __nccwpck_require__(19203); /** * Attach functionality to a {@link Storage.acl} instance. 
This will add an * object for each role group (owners, readers, and writers), with each object @@ -68955,7 +108372,7 @@ exports.Acl = Acl; /***/ }), -/***/ 2963: +/***/ 23973: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -69002,23 +108419,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; const index_js_1 = __nccwpck_require__(4052); -const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const fs = __importStar(__nccwpck_require__(7147)); -const mime_1 = __importDefault(__nccwpck_require__(9994)); -const path = __importStar(__nccwpck_require__(1017)); -const p_limit_1 = __importDefault(__nccwpck_require__(7684)); -const util_1 = __nccwpck_require__(3837); -const async_retry_1 = __importDefault(__nccwpck_require__(3415)); -const util_js_1 = __nccwpck_require__(9258); -const acl_js_1 = __nccwpck_require__(2989); +const paginator_1 = __nccwpck_require__(46412); +const promisify_1 = __nccwpck_require__(19203); +const fs = __importStar(__nccwpck_require__(57147)); +const mime_1 = __importDefault(__nccwpck_require__(29994)); +const path = __importStar(__nccwpck_require__(71017)); +const p_limit_1 = __importDefault(__nccwpck_require__(57684)); +const util_1 = __nccwpck_require__(73837); +const async_retry_1 = __importDefault(__nccwpck_require__(33415)); +const util_js_1 = __nccwpck_require__(59258); +const acl_js_1 = __nccwpck_require__(22989); const file_js_1 = __nccwpck_require__(4713); -const iam_js_1 = __nccwpck_require__(352); -const notification_js_1 = __nccwpck_require__(1178); -const storage_js_1 = __nccwpck_require__(3030); -const signer_js_1 = __nccwpck_require__(9019); -const stream_1 = __nccwpck_require__(2781); -const url_1 = __nccwpck_require__(7310); +const iam_js_1 = __nccwpck_require__(80352); +const notification_js_1 = __nccwpck_require__(21178); +const storage_js_1 = __nccwpck_require__(33030); +const signer_js_1 = __nccwpck_require__(59019); +const stream_1 = __nccwpck_require__(12781); +const url_1 = __nccwpck_require__(57310); var BucketActionToHTTPMethod; (function (BucketActionToHTTPMethod) { BucketActionToHTTPMethod["list"] = "GET"; @@ -72487,7 +111904,7 @@ paginator_1.paginator.extend(Bucket, 'getFiles'); /***/ }), -/***/ 2665: +/***/ 62665: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -72508,7 +111925,7 @@ paginator_1.paginator.extend(Bucket, 'getFiles'); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Channel = void 0; const index_js_1 = __nccwpck_require__(4052); -const promisify_1 = __nccwpck_require__(9203); +const promisify_1 = __nccwpck_require__(19203); /** * Create a channel object to interact with a Cloud Storage channel. 
* @@ -72599,7 +112016,7 @@ exports.Channel = Channel; /***/ }), -/***/ 5810: +/***/ 55810: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -72631,7 +112048,7 @@ var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function ( var _CRC32C_crc32c; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; -const fs_1 = __nccwpck_require__(7147); +const fs_1 = __nccwpck_require__(57147); /** * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} */ @@ -72914,23 +112331,23 @@ var _File_instances, _File_validateIntegrity; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; const index_js_1 = __nccwpck_require__(4052); -const promisify_1 = __nccwpck_require__(9203); +const promisify_1 = __nccwpck_require__(19203); const crypto = __importStar(__nccwpck_require__(6113)); -const fs = __importStar(__nccwpck_require__(7147)); -const mime_1 = __importDefault(__nccwpck_require__(9994)); -const resumableUpload = __importStar(__nccwpck_require__(2851)); -const stream_1 = __nccwpck_require__(2781); -const zlib = __importStar(__nccwpck_require__(9796)); -const storage_js_1 = __nccwpck_require__(3030); -const bucket_js_1 = __nccwpck_require__(2963); -const acl_js_1 = __nccwpck_require__(2989); -const signer_js_1 = __nccwpck_require__(9019); -const util_js_1 = __nccwpck_require__(8064); -const duplexify_1 = __importDefault(__nccwpck_require__(6599)); -const util_js_2 = __nccwpck_require__(9258); -const crc32c_js_1 = __nccwpck_require__(5810); -const hash_stream_validator_js_1 = __nccwpck_require__(725); -const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const fs = __importStar(__nccwpck_require__(57147)); +const mime_1 = __importDefault(__nccwpck_require__(29994)); +const resumableUpload = __importStar(__nccwpck_require__(42851)); +const stream_1 = __nccwpck_require__(12781); +const zlib = __importStar(__nccwpck_require__(59796)); +const storage_js_1 = __nccwpck_require__(33030); +const bucket_js_1 = __nccwpck_require__(23973); +const acl_js_1 = __nccwpck_require__(22989); +const signer_js_1 = __nccwpck_require__(59019); +const util_js_1 = __nccwpck_require__(38064); +const duplexify_1 = __importDefault(__nccwpck_require__(76599)); +const util_js_2 = __nccwpck_require__(59258); +const crc32c_js_1 = __nccwpck_require__(55810); +const hash_stream_validator_js_1 = __nccwpck_require__(40725); +const async_retry_1 = __importDefault(__nccwpck_require__(33415)); var ActionToHTTPMethod; (function (ActionToHTTPMethod) { ActionToHTTPMethod["read"] = "GET"; @@ -76268,7 +115685,7 @@ async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { /***/ }), -/***/ 725: +/***/ 40725: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -76301,8 +115718,8 @@ var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamVa Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HashStreamValidator = void 0; const crypto_1 = __nccwpck_require__(6113); -const stream_1 = __nccwpck_require__(2781); -const crc32c_js_1 = __nccwpck_require__(5810); 
+const stream_1 = __nccwpck_require__(12781); +const crc32c_js_1 = __nccwpck_require__(55810); const file_js_1 = __nccwpck_require__(4713); class HashStreamValidator extends stream_1.Transform { constructor(options = {}) { @@ -76395,7 +115812,7 @@ _HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = /***/ }), -/***/ 4654: +/***/ 64654: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -76416,8 +115833,8 @@ _HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HmacKey = void 0; const index_js_1 = __nccwpck_require__(4052); -const storage_js_1 = __nccwpck_require__(3030); -const promisify_1 = __nccwpck_require__(9203); +const storage_js_1 = __nccwpck_require__(33030); +const promisify_1 = __nccwpck_require__(19203); /** * The API-formatted resource description of the HMAC key. * @@ -76739,7 +116156,7 @@ exports.HmacKey = HmacKey; /***/ }), -/***/ 352: +/***/ 80352: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -76759,8 +116176,8 @@ exports.HmacKey = HmacKey; // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Iam = exports.IAMExceptionMessages = void 0; -const promisify_1 = __nccwpck_require__(9203); -const util_js_1 = __nccwpck_require__(9258); +const promisify_1 = __nccwpck_require__(19203); +const util_js_1 = __nccwpck_require__(59258); var IAMExceptionMessages; (function (IAMExceptionMessages) { IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; @@ -77053,7 +116470,7 @@ exports.Iam = Iam; /***/ }), -/***/ 7577: +/***/ 27577: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -77133,24 +116550,24 @@ exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Ch */ var index_js_1 = __nccwpck_require__(4052); Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); -var storage_js_1 = __nccwpck_require__(3030); +var storage_js_1 = __nccwpck_require__(33030); Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); -var bucket_js_1 = __nccwpck_require__(2963); +var bucket_js_1 = __nccwpck_require__(23973); Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); -__exportStar(__nccwpck_require__(5810), exports); -var channel_js_1 = __nccwpck_require__(2665); +__exportStar(__nccwpck_require__(55810), exports); +var channel_js_1 = __nccwpck_require__(62665); Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); var file_js_1 = __nccwpck_require__(4713); Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); -__exportStar(__nccwpck_require__(725), exports); -var hmacKey_js_1 = __nccwpck_require__(4654); +__exportStar(__nccwpck_require__(40725), exports); +var hmacKey_js_1 = __nccwpck_require__(64654); Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); -var iam_js_1 = __nccwpck_require__(352); +var iam_js_1 = __nccwpck_require__(80352); 
Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); -var notification_js_1 = __nccwpck_require__(1178); +var notification_js_1 = __nccwpck_require__(21178); Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); -__exportStar(__nccwpck_require__(5594), exports); +__exportStar(__nccwpck_require__(45594), exports); /***/ }), @@ -77162,18 +116579,18 @@ __exportStar(__nccwpck_require__(5594), exports); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; -var service_js_1 = __nccwpck_require__(7370); +var service_js_1 = __nccwpck_require__(17370); Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); -var service_object_js_1 = __nccwpck_require__(3409); +var service_object_js_1 = __nccwpck_require__(73409); Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); -var util_js_1 = __nccwpck_require__(8064); +var util_js_1 = __nccwpck_require__(38064); Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); /***/ }), -/***/ 3409: +/***/ 73409: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -77195,9 +116612,9 @@ exports.ServiceObject = void 0; * See the License for the specific language governing permissions and * limitations under the License. */ -const promisify_1 = __nccwpck_require__(9203); -const events_1 = __nccwpck_require__(2361); -const util_js_1 = __nccwpck_require__(8064); +const promisify_1 = __nccwpck_require__(19203); +const events_1 = __nccwpck_require__(82361); +const util_js_1 = __nccwpck_require__(38064); /** * ServiceObject is a base class, meant to be inherited from by a "service * object," like a BigQuery dataset or Storage bucket. @@ -77473,7 +116890,7 @@ exports.ServiceObject = ServiceObject; /***/ }), -/***/ 7370: +/***/ 17370: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -77518,10 +116935,10 @@ exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; * See the License for the specific language governing permissions and * limitations under the License. 
*/ -const google_auth_library_1 = __nccwpck_require__(810); -const uuid = __importStar(__nccwpck_require__(4458)); -const util_js_1 = __nccwpck_require__(8064); -const util_js_2 = __nccwpck_require__(9258); +const google_auth_library_1 = __nccwpck_require__(20810); +const uuid = __importStar(__nccwpck_require__(44458)); +const util_js_1 = __nccwpck_require__(38064); +const util_js_2 = __nccwpck_require__(59258); exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; class Service { /** @@ -77689,7 +117106,7 @@ exports.Service = Service; /***/ }), -/***/ 8064: +/***/ 38064: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -77741,18 +117158,18 @@ exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = e * @module common/util */ const projectify_1 = __nccwpck_require__(3497); -const htmlEntities = __importStar(__nccwpck_require__(2589)); -const google_auth_library_1 = __nccwpck_require__(810); -const retry_request_1 = __importDefault(__nccwpck_require__(3515)); -const stream_1 = __nccwpck_require__(2781); +const htmlEntities = __importStar(__nccwpck_require__(52589)); +const google_auth_library_1 = __nccwpck_require__(20810); +const retry_request_1 = __importDefault(__nccwpck_require__(63515)); +const stream_1 = __nccwpck_require__(12781); const teeny_request_1 = __nccwpck_require__(6886); -const uuid = __importStar(__nccwpck_require__(4458)); -const service_js_1 = __nccwpck_require__(7370); -const util_js_1 = __nccwpck_require__(9258); -const duplexify_1 = __importDefault(__nccwpck_require__(6599)); +const uuid = __importStar(__nccwpck_require__(44458)); +const service_js_1 = __nccwpck_require__(17370); +const util_js_1 = __nccwpck_require__(59258); +const duplexify_1 = __importDefault(__nccwpck_require__(76599)); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore -const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const package_json_helper_cjs_1 = __nccwpck_require__(28568); const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); /** * A unique symbol for providing a `gccl-gcs-cmd` value @@ -78398,7 +117815,7 @@ exports.util = util; /***/ }), -/***/ 1178: +/***/ 21178: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -78419,7 +117836,7 @@ exports.util = util; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Notification = void 0; const index_js_1 = __nccwpck_require__(4052); -const promisify_1 = __nccwpck_require__(9203); +const promisify_1 = __nccwpck_require__(19203); /** * The API-formatted resource description of the notification. 
* @@ -78671,7 +118088,7 @@ exports.Notification = Notification; /***/ }), -/***/ 2851: +/***/ 42851: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -78729,18 +118146,18 @@ var __importDefault = (this && this.__importDefault) || function (mod) { var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0; -const abort_controller_1 = __importDefault(__nccwpck_require__(1659)); +const abort_controller_1 = __importDefault(__nccwpck_require__(61659)); const crypto_1 = __nccwpck_require__(6113); -const gaxios = __importStar(__nccwpck_require__(9555)); -const google_auth_library_1 = __nccwpck_require__(810); -const stream_1 = __nccwpck_require__(2781); -const async_retry_1 = __importDefault(__nccwpck_require__(3415)); -const uuid = __importStar(__nccwpck_require__(4458)); -const util_js_1 = __nccwpck_require__(9258); -const util_js_2 = __nccwpck_require__(8064); +const gaxios = __importStar(__nccwpck_require__(59555)); +const google_auth_library_1 = __nccwpck_require__(20810); +const stream_1 = __nccwpck_require__(12781); +const async_retry_1 = __importDefault(__nccwpck_require__(33415)); +const uuid = __importStar(__nccwpck_require__(44458)); +const util_js_1 = __nccwpck_require__(59258); +const util_js_2 = __nccwpck_require__(38064); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore -const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const package_json_helper_cjs_1 = __nccwpck_require__(28568); const NOT_FOUND_STATUS_CODE = 404; const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); @@ -79566,7 +118983,7 @@ exports.checkUploadStatus = checkUploadStatus; /***/ }), -/***/ 9019: +/***/ 59019: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -79610,9 +119027,9 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; const crypto = __importStar(__nccwpck_require__(6113)); -const url = __importStar(__nccwpck_require__(7310)); -const storage_js_1 = __nccwpck_require__(3030); -const util_js_1 = __nccwpck_require__(9258); +const url = __importStar(__nccwpck_require__(57310)); +const storage_js_1 = __nccwpck_require__(33030); +const util_js_1 = __nccwpck_require__(59258); var SignerExceptionMessages; (function (SignerExceptionMessages) { SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; @@ -79901,7 +119318,7 @@ exports.SigningError = SigningError; /***/ }), -/***/ 3030: +/***/ 33030: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -79922,19 +119339,19 @@ exports.SigningError = SigningError; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; const index_js_1 = __nccwpck_require__(4052); 
-const paginator_1 = __nccwpck_require__(6412); -const promisify_1 = __nccwpck_require__(9203); -const stream_1 = __nccwpck_require__(2781); -const bucket_js_1 = __nccwpck_require__(2963); -const channel_js_1 = __nccwpck_require__(2665); +const paginator_1 = __nccwpck_require__(46412); +const promisify_1 = __nccwpck_require__(19203); +const stream_1 = __nccwpck_require__(12781); +const bucket_js_1 = __nccwpck_require__(23973); +const channel_js_1 = __nccwpck_require__(62665); const file_js_1 = __nccwpck_require__(4713); -const util_js_1 = __nccwpck_require__(9258); +const util_js_1 = __nccwpck_require__(59258); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore -const package_json_helper_cjs_1 = __nccwpck_require__(8568); -const hmacKey_js_1 = __nccwpck_require__(4654); -const crc32c_js_1 = __nccwpck_require__(5810); -const google_auth_library_1 = __nccwpck_require__(810); +const package_json_helper_cjs_1 = __nccwpck_require__(28568); +const hmacKey_js_1 = __nccwpck_require__(64654); +const crc32c_js_1 = __nccwpck_require__(55810); +const google_auth_library_1 = __nccwpck_require__(20810); var IdempotencyStrategy; (function (IdempotencyStrategy) { IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; @@ -81059,7 +120476,7 @@ paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); /***/ }), -/***/ 5594: +/***/ 45594: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -81114,19 +120531,19 @@ var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiCli Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TransferManager = exports.MultiPartUploadError = void 0; const file_js_1 = __nccwpck_require__(4713); -const p_limit_1 = __importDefault(__nccwpck_require__(7684)); -const path = __importStar(__nccwpck_require__(1017)); -const fs_1 = __nccwpck_require__(7147); -const crc32c_js_1 = __nccwpck_require__(5810); -const google_auth_library_1 = __nccwpck_require__(810); -const fast_xml_parser_1 = __nccwpck_require__(2603); -const async_retry_1 = __importDefault(__nccwpck_require__(3415)); +const p_limit_1 = __importDefault(__nccwpck_require__(57684)); +const path = __importStar(__nccwpck_require__(71017)); +const fs_1 = __nccwpck_require__(57147); +const crc32c_js_1 = __nccwpck_require__(55810); +const google_auth_library_1 = __nccwpck_require__(20810); +const fast_xml_parser_1 = __nccwpck_require__(12603); +const async_retry_1 = __importDefault(__nccwpck_require__(33415)); const crypto_1 = __nccwpck_require__(6113); -const util_js_1 = __nccwpck_require__(8064); -const util_js_2 = __nccwpck_require__(9258); +const util_js_1 = __nccwpck_require__(38064); +const util_js_2 = __nccwpck_require__(59258); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore -const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const package_json_helper_cjs_1 = __nccwpck_require__(28568); const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); /** * Default number of concurrently executing promises to use when calling uploadManyFiles. 
@@ -81743,7 +121160,7 @@ exports.TransferManager = TransferManager; /***/ }), -/***/ 9258: +/***/ 59258: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -81797,13 +121214,13 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PassThroughShim = exports.getModuleFormat = exports.getDirName = exports.getUserAgentString = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0; -const path = __importStar(__nccwpck_require__(1017)); -const querystring = __importStar(__nccwpck_require__(3477)); -const stream_1 = __nccwpck_require__(2781); -const url = __importStar(__nccwpck_require__(7310)); +const path = __importStar(__nccwpck_require__(71017)); +const querystring = __importStar(__nccwpck_require__(63477)); +const stream_1 = __nccwpck_require__(12781); +const url = __importStar(__nccwpck_require__(57310)); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore -const package_json_helper_cjs_1 = __nccwpck_require__(8568); +const package_json_helper_cjs_1 = __nccwpck_require__(28568); // Done to avoid a problem with mangling of identifiers when using esModuleInterop const fileURLToPath = url.fileURLToPath; const isEsm = false; @@ -82021,7 +121438,7 @@ exports.PassThroughShim = PassThroughShim; /***/ }), -/***/ 8568: +/***/ 28568: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { // Copyright 2023 Google LLC @@ -82041,7 +121458,7 @@ exports.PassThroughShim = PassThroughShim; /* eslint-disable node/no-missing-require */ function getPackageJSON() { - return __nccwpck_require__(5458); + return __nccwpck_require__(35458); } exports.getPackageJSON = getPackageJSON; @@ -82049,23 +121466,23 @@ exports.getPackageJSON = getPackageJSON; /***/ }), -/***/ 8757: +/***/ 88757: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Axios v1.7.2 Copyright (c) 2024 Matt Zabriskie and contributors -const FormData$1 = __nccwpck_require__(1403); -const url = __nccwpck_require__(7310); -const proxyFromEnv = __nccwpck_require__(3329); -const http = __nccwpck_require__(3685); -const https = __nccwpck_require__(5687); -const util = __nccwpck_require__(3837); -const followRedirects = __nccwpck_require__(7707); -const zlib = __nccwpck_require__(9796); -const stream = __nccwpck_require__(2781); -const events = __nccwpck_require__(2361); +const FormData$1 = __nccwpck_require__(91403); +const url = __nccwpck_require__(57310); +const proxyFromEnv = __nccwpck_require__(63329); +const http = __nccwpck_require__(13685); +const https = __nccwpck_require__(95687); +const util = __nccwpck_require__(73837); +const followRedirects = __nccwpck_require__(67707); +const zlib = __nccwpck_require__(59796); +const stream = __nccwpck_require__(12781); +const events = __nccwpck_require__(82361); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
e : { 'default': e }; } @@ -86762,7 +126179,7 @@ module.exports = axios; /***/ }), -/***/ 5458: +/***/ 35458: /***/ ((module) => { "use strict"; @@ -86778,7 +126195,7 @@ module.exports = JSON.parse('{"name":"gaxios","version":"6.6.0","description":"A /***/ }), -/***/ 1402: +/***/ 51402: /***/ ((module) => { "use strict"; @@ -86786,7 +126203,7 @@ module.exports = JSON.parse('{"name":"google-auth-library","version":"9.10.0","a /***/ }), -/***/ 3765: +/***/ 53765: /***/ ((module) => { "use strict"; @@ -86794,7 +126211,7 @@ module.exports = JSON.parse('{"application/1d-interleaved-parityfec":{"source":" /***/ }), -/***/ 2020: +/***/ 72020: /***/ ((module) => { "use strict"; @@ -86844,7 +126261,7 @@ module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"] /******/ // startup /******/ // Load entry module and return exports /******/ // This entry module is referenced by other modules so it can't be inlined -/******/ var __webpack_exports__ = __nccwpck_require__(4553); +/******/ var __webpack_exports__ = __nccwpck_require__(14553); /******/ module.exports = __webpack_exports__; /******/ /******/ })() diff --git a/dist/setup/index.js b/dist/setup/index.js index 4d4858ee4..d362e55cc 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -99,7 +99,7 @@ exports.isFeatureAvailable = isFeatureAvailable; * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false, enableCrossArchArchive = false) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t; + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(primaryKey); @@ -210,6 +210,30 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch core.info('Cache restored successfully'); break; } + case 'azure_blob': { + if (!((_u = cacheEntry.azure_blob) === null || _u === void 0 ? void 0 : _u.pre_signed_url)) { + return undefined; + } + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheKey; + } + try { + yield cacheHttpClient.downloadCache(cacheEntry.provider, (_v = cacheEntry.azure_blob) === null || _v === void 0 ? void 0 : _v.pre_signed_url, archivePath); + } + catch (error) { + core.info('Cache Miss. 
Failed to download cache.'); + return undefined; + } + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + break; + } } return cacheKey; } @@ -246,7 +270,7 @@ exports.restoreCache = restoreCache; * @returns string returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, enableCrossOsArchive = false, enableCrossArchArchive = false) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y; + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); @@ -299,6 +323,13 @@ function saveCache(paths, key, enableCrossOsArchive = false, enableCrossArchArch // S3 Params are undefined for GCS undefined, undefined, undefined, undefined, (_s = (_r = (_q = (_p = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _p === void 0 ? void 0 : _p.gcs) === null || _q === void 0 ? void 0 : _q.short_lived_token) === null || _r === void 0 ? void 0 : _r.access_token) !== null && _s !== void 0 ? _s : '', (_v = (_u = (_t = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _t === void 0 ? void 0 : _t.gcs) === null || _u === void 0 ? void 0 : _u.bucket_name) !== null && _v !== void 0 ? _v : '', (_y = (_x = (_w = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _w === void 0 ? void 0 : _w.gcs) === null || _x === void 0 ? void 0 : _x.cache_key) !== null && _y !== void 0 ? _y : ''); break; + case 'azure_blob': + core.debug(`Saving Cache to Azure Blob`); + cacheKey = yield cacheHttpClient.saveCache('azure_blob', key, cacheVersion, archivePath, + // S3 Params are undefined for GCS + undefined, undefined, undefined, undefined, + // GCS Params are undefined for Azure Blob + undefined, undefined, undefined, (_1 = (_0 = (_z = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _z === void 0 ? void 0 : _z.azure_blob) === null || _0 === void 0 ? void 0 : _0.pre_signed_url) !== null && _1 !== void 0 ? 
_1 : ''); } } catch (error) { @@ -393,6 +424,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.reserveCache = exports.downloadCacheStreaming = exports.downloadCacheSingleThread = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(42186)); +const github = __importStar(__nccwpck_require__(28294)); const http_client_1 = __nccwpck_require__(96255); const auth_1 = __nccwpck_require__(35526); const crypto = __importStar(__nccwpck_require__(6113)); @@ -403,6 +435,7 @@ const requestUtils_1 = __nccwpck_require__(13981); const storage_1 = __nccwpck_require__(27577); const uploadUtils_1 = __nccwpck_require__(1786); const google_auth_library_1 = __nccwpck_require__(20810); +const storage_blob_1 = __nccwpck_require__(84100); const versionSalt = '1.0'; function getCacheApiUrl(resource) { var _a; @@ -466,6 +499,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false, if (process.platform === 'win32' && !enableCrossOsArchive) { components.push('windows-only'); } + // Check for mac platforms if enableCrossOsArchive is false + if (process.platform === 'darwin' && !enableCrossOsArchive) { + components.push('mac-only'); + } // Add architecture to cache version if (!enableCrossArchArchive) { components.push(process.arch); @@ -476,16 +513,49 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false, } exports.getCacheVersion = getCacheVersion; function getCacheEntry(key, restoreKeys, paths, options) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive, options === null || options === void 0 ? void 0 : options.enableCrossArchArchive); + const restoreBranches = new Set(); + const restoreRepos = new Set(); + switch (github.context.eventName) { + case 'pull_request': + { + const pullPayload = github.context.payload; + // Adds PR head branch and base branch to restoreBranches + restoreBranches.add(`refs/heads/${(_b = (_a = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _a === void 0 ? void 0 : _a.head) === null || _b === void 0 ? void 0 : _b.ref}`); + restoreBranches.add(`refs/heads/${(_d = (_c = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _c === void 0 ? void 0 : _c.base) === null || _d === void 0 ? void 0 : _d.ref}`); + // Adds default branch to restoreBranches + restoreBranches.add(`refs/heads/${(_e = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _e === void 0 ? void 0 : _e.default_branch}`); + // If head points to a different repository, add it to restoreRepos. We allow restores from head repos as well. + if (((_h = (_g = (_f = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _f === void 0 ? void 0 : _f.head) === null || _g === void 0 ? void 0 : _g.repo) === null || _h === void 0 ? void 0 : _h.name) !== + ((_j = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _j === void 0 ? 
void 0 : _j.name)) { + restoreRepos.add((_m = (_l = (_k = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _k === void 0 ? void 0 : _k.head) === null || _l === void 0 ? void 0 : _l.repo) === null || _m === void 0 ? void 0 : _m.name); + } + } + break; + case 'push': + case 'workflow_dispatch': + { + const pushPayload = github.context.payload; + // Default branch is not in the complete format + // Ref: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + restoreBranches.add(`refs/heads/${(_o = pushPayload === null || pushPayload === void 0 ? void 0 : pushPayload.repository) === null || _o === void 0 ? void 0 : _o.default_branch}`); + } + break; + default: + break; + } const getCacheRequest = { cache_key: key, restore_keys: restoreKeys, cache_version: version, vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), - annotations: getAnnotations() + annotations: getAnnotations(), + restore_branches: Array.from(restoreBranches), + restore_repos: Array.from(restoreRepos) }; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cache/get'), getCacheRequest); @@ -554,6 +624,11 @@ function downloadCache(provider, archiveLocation, archivePath, gcsToken) { yield (0, downloadUtils_1.downloadCacheMultipartGCP)(storage, archiveLocation, archivePath); break; } + case 'azure_blob': { + const blockBlobClient = new storage_blob_1.BlockBlobClient(archiveLocation); + yield blockBlobClient.downloadToFile(archivePath); + break; + } } }); } @@ -644,8 +719,8 @@ function commitCache(cacheKey, cacheVersion, uploadKey, uploadID, parts) { })); }); } -function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3UploadKey, S3NumberOfChunks, S3PreSignedURLs, GCSAuthToken, GCSBucketName, GCSObjectName) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j; +function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3UploadKey, S3NumberOfChunks, S3PreSignedURLs, GCSAuthToken, GCSBucketName, GCSObjectName, AzureSASURL) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t; return __awaiter(this, void 0, void 0, function* () { const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); @@ -674,7 +749,8 @@ function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3 completedParts.sort((a, b) => a.PartNumber - b.PartNumber); core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion, S3UploadKey, S3UploadId, completedParts); - cacheKeyResponse = (_c = (_b = (_a = commitCacheResponse.result) === null || _a === void 0 ? void 0 : _a.s3) === null || _b === void 0 ? void 0 : _b.cache_key) !== null && _c !== void 0 ? _c : ''; + cacheKeyResponse = + (_f = (_c = (_b = (_a = commitCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cache_entry) === null || _b === void 0 ? void 0 : _b.cache_user_given_key) !== null && _c !== void 0 ? _c : (_e = (_d = commitCacheResponse.result) === null || _d === void 0 ? void 0 : _d.s3) === null || _e === void 0 ? void 0 : _e.cache_key) !== null && _f !== void 0 ? 
_f : ''; break; } case 'gcs': { @@ -691,7 +767,23 @@ function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3 core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion); cacheKeyResponse = - (_j = (_f = (_e = (_d = commitCacheResponse.result) === null || _d === void 0 ? void 0 : _d.cache_entry) === null || _e === void 0 ? void 0 : _e.cache_user_given_key) !== null && _f !== void 0 ? _f : (_h = (_g = commitCacheResponse.result) === null || _g === void 0 ? void 0 : _g.gcs) === null || _h === void 0 ? void 0 : _h.cache_key) !== null && _j !== void 0 ? _j : ''; + (_m = (_j = (_h = (_g = commitCacheResponse.result) === null || _g === void 0 ? void 0 : _g.cache_entry) === null || _h === void 0 ? void 0 : _h.cache_user_given_key) !== null && _j !== void 0 ? _j : (_l = (_k = commitCacheResponse.result) === null || _k === void 0 ? void 0 : _k.gcs) === null || _l === void 0 ? void 0 : _l.cache_key) !== null && _m !== void 0 ? _m : ''; + break; + } + case 'azure_blob': { + if (!AzureSASURL) { + core.debug(`Azure SAS URL is not set. SAS URL: ${AzureSASURL}`); + throw new Error('Unable to upload cache to Azure Blob. SAS URL is not provided.'); + } + core.debug('Uploading cache'); + const blockBlobClient = new storage_blob_1.BlockBlobClient(AzureSASURL); + yield blockBlobClient.uploadFile(archivePath, { + maxSingleShotSize: 10 * 1024 * 1024 // 10 MB + }); + core.debug('Committing cache'); + commitCacheResponse = yield commitCache(cacheKey, cacheVersion); + cacheKeyResponse = + (_t = (_q = (_p = (_o = commitCacheResponse.result) === null || _o === void 0 ? void 0 : _o.cache_entry) === null || _p === void 0 ? void 0 : _p.cache_user_given_key) !== null && _q !== void 0 ? _q : (_s = (_r = commitCacheResponse.result) === null || _r === void 0 ? void 0 : _r.azure_blob) === null || _s === void 0 ? void 0 : _s.cache_key) !== null && _t !== void 0 ? _t : ''; break; } } @@ -1968,6 +2060,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.multiPartUploadToGCS = exports.uploadFileToS3 = void 0; const core = __importStar(__nccwpck_require__(42186)); const utils = __importStar(__nccwpck_require__(91518)); +const os = __importStar(__nccwpck_require__(22037)); const fs_1 = __importDefault(__nccwpck_require__(57147)); const axios_1 = __importDefault(__nccwpck_require__(88757)); const storage_1 = __nccwpck_require__(27577); @@ -1980,7 +2073,7 @@ function getContentRange(start, end) { return `bytes ${start}-${end}/*`; } function uploadChunk(resourceUrl, openStream, partNumber, start, end) { - var _a; + var _a, _b; return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); // Manually convert the readable stream to a buffer. S3 doesn't allow stream as input @@ -2001,7 +2094,7 @@ function uploadChunk(resourceUrl, openStream, partNumber, start, end) { }; } catch (error) { - throw new Error(`Cache service responded with ${error.status} during upload chunk.`); + throw new Error(`Cache service responded with ${(_b = error.response) === null || _b === void 0 ? 
void 0 : _b.status} during upload chunk.`); } }); } @@ -2009,26 +2102,39 @@ function uploadFileToS3(preSignedURLs, archivePath) { return __awaiter(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const numberOfChunks = preSignedURLs.length; + let concurrency = 4; + // Adjust concurrency based on the number of cpu cores + if (os.cpus().length > 4) { + concurrency = 8; + } const fd = fs_1.default.openSync(archivePath, 'r'); - core.debug('Awaiting all uploads'); + core.debug(`Awaiting all uploads with concurrency limit of ${concurrency}`); let offset = 0; + const completedParts = []; try { - const completedParts = yield Promise.all(preSignedURLs.map((presignedURL, index) => __awaiter(this, void 0, void 0, function* () { - const chunkSize = Math.ceil(fileSize / numberOfChunks); - const start = offset; - const end = offset + chunkSize - 1; - offset += chunkSize; - return yield uploadChunk(presignedURL, () => fs_1.default - .createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }) - .on('error', error => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), index + 1, start, end); - }))); + for (let i = 0; i < numberOfChunks; i += concurrency) { + const batch = preSignedURLs + .slice(i, i + concurrency) + .map((presignedURL, index) => { + const chunkIndex = i + index; + const chunkSize = Math.ceil(fileSize / numberOfChunks); + const start = offset; + const end = offset + chunkSize - 1; + offset += chunkSize; + return uploadChunk(presignedURL, () => fs_1.default + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), chunkIndex + 1, start, end); + }); + const batchResults = yield Promise.all(batch); + completedParts.push(...batchResults); + } return completedParts; } finally { @@ -2168,6 +2274,259 @@ exports.getDownloadOptions = getDownloadOptions; /***/ }), +/***/ 40333: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 28294: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(40333)); +const utils_1 = __nccwpck_require__(30849); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 28189: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(96255)); +const undici_1 = __nccwpck_require__(41773); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 30849: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(40333)); +const Utils = __importStar(__nccwpck_require__(28189)); +// octokit + plugins +const core_1 = __nccwpck_require__(15552); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(16297); +const plugin_paginate_rest_1 = __nccwpck_require__(19655); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + /***/ 31597: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -3231,3621 +3590,5666 @@ exports.SearchState = SearchState; /***/ }), -/***/ 3771: -/***/ ((module, exports) => { +/***/ 24101: +/***/ ((module) => { -exports = module.exports = SemVer +"use strict"; -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } -} else { - debug = function () {} -} - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. 
-var MAX_SAFE_COMPONENT_LENGTH = 16 - -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); -function tok (n) { - t[n] = R++ +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; } -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' - -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] - -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - return value + return `token ${token}`; } -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' - -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. 
- -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' - -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' - -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' - -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' - -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' 
-tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' +/***/ }), -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' +/***/ 15552: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') +"use strict"; -// Tilde ranges. -// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(45030); +var import_before_after_hook = __nccwpck_require__(83682); +var import_request = __nccwpck_require__(77434); +var import_graphql = __nccwpck_require__(80543); +var import_auth_token = __nccwpck_require__(24101); + +// pkg/dist-src/version.js +var VERSION = "5.2.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. 
+ * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' +/***/ }), -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' +/***/ 62777: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' +"use strict"; -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(45030); -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' +// pkg/dist-src/version.js +var VERSION = "9.0.5"; -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; -// Star ranges basically just allow anything at all. 
-tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. - safeRe[i] = new RegExp(makeSafeRe(src[i])) - } +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; } -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; } } + return obj; +} - if (version instanceof SemVer) { - return version +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); } - - if (typeof version !== 'string') { - return null + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } + return mergedOptions; +} - if (version.length > MAX_LENGTH) { - return null +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} - var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} - try { - return new SemVer(version, options) - } catch (er) { - return null +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } } + return result; } -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); } - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); } - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } - - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - - var m = version.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]) - - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version } - -SemVer.prototype.toString = function () { - return this.version -} - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) -} - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} - -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) +function isDefined(value) { + return value !== void 0 && value !== null; } - -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; } - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. 
-SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); } } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; } + return (values.length !== 0 ? 
operator : "") + values.join(separator); } else { - this.prerelease = [identifier, 0] + return values.join(","); } + } else { + return encodeReserved(literal); } - break - - default: - throw new Error('invalid increment argument: ' + release) + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); } - this.format() - this.raw = this.version - return this } -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } } -} - -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; } } - return defaultResult // may be undefined } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? 
{ request: options.request } : null + ); } -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); } -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); } -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} +/***/ }), -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} +/***/ 80543: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} +"use strict"; -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(77434); +var import_universal_user_agent = __nccwpck_require__(45030); -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) - }) -} +// pkg/dist-src/version.js +var VERSION = "7.1.0"; -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) -} +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(77434); -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(77434); -exports.lt = lt -function lt (a, b, loose) { - return 
compare(a, b, loose) < 0 +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); } +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); } -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); } -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); } +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 
'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } -} - -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } +/***/ }), - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } +/***/ 19655: +/***/ ((module) => { - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) +"use strict"; - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) +// pkg/dist-src/version.js +var VERSION = "9.2.1"; - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; } - - this.operator = m[1] !== undefined ? 
m[1] : '' - if (this.operator === '=') { - this.operator = '' + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; } - - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; } + response.data.total_count = totalCount; + return response; } -Comparator.prototype.toString = function () { - return this.value +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; } - - return cmp(version, this.operator, this.semver, this.options) + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); } - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; } - } - - var rangeTmp - - if (this.operator === '') { - if (this.value === '') { - return true + let earlyExit = false; + function done() { + earlyExit = true; } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan + return gather(octokit, results, iterator2, mapFn); + }); } -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. 
- this.raw = range - .trim() - .split(/\s+/) - .join(' ') +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/organization-roles/{role_id}/teams", + "GET /orgs/{org}/organization-roles/{role_id}/users", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET 
/users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range } -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set + }; } +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) -} +/***/ }), -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() +/***/ 16297: +/***/ ((module) => { - while (result && remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) +"use strict"; - testComparator = remainingComparators.pop() +var __defProp = Object.defineProperty; 
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - return result -} - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' 
+ m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } - } - - debug('caret return', ret) - return ret - }) -} +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} +// pkg/dist-src/version.js +var VERSION = "10.4.1"; -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? 
'-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + 
downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + 
reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT 
/repos/{owner}/{repo}/notifications"], + markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + 
updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET 
/user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST 
/user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + 
unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET 
/repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. 
- gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" } - - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(safeRe[t.STAR], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. 
- return false - } - - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} - -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" } - } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
- - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. 
- var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + assignTeamToOrgRole: [ + "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + assignUserToOrgRole: [ + "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteCustomOrganizationRole: [ + "DELETE /orgs/{org}/organization-roles/{role_id}" + ], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], + listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], + listOrgRoles: ["GET /orgs/{org}/organization-roles"], + listOrganizationFineGrainedPermissions: [ + "GET /orgs/{org}/organization-fine-grained-permissions" + ], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + 
listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + patchCustomOrganizationRole: [ + "PATCH /orgs/{org}/organization-roles/{role_id}" + ], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + revokeAllOrgRolesTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" + ], + revokeAllOrgRolesUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}" + ], + revokeOrgRoleTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + revokeOrgRoleUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + 
{}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) -} - - -/***/ }), - -/***/ 94138: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var v1 = __nccwpck_require__(61610); -var v4 = __nccwpck_require__(8373); - -var uuid = v4; -uuid.v1 = v1; -uuid.v4 = v4; - -module.exports = uuid; - - -/***/ }), - -/***/ 65694: -/***/ ((module) => { - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); -} - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 34069: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. - -var crypto = __nccwpck_require__(6113); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 61610: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(34069); -var bytesToUuid = __nccwpck_require__(65694); - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html - -var _nodeId; -var _clockseq; - -// Previous uuid creation time -var _lastMSecs = 0; -var _lastNSecs = 0; - -// See https://github.com/uuidjs/uuid for API details -function v1(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - - options = options || {}; - var node = options.node || _nodeId; - var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; - - // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - if (node == null || clockseq == null) { - var seedBytes = rng(); - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [ - seedBytes[0] | 0x01, - seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] - ]; + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + 
listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + 
"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + cancelPagesDeployment: [ + "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST 
/repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateCustomPropertiesValues: [ + "PATCH /repos/{owner}/{repo}/properties/values" + ], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE 
/repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesDeployment: [ + "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" + ], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + 
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH 
/repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createFork: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" + ], + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + 
checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE 
/user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* 
@__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); } - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); } + return requestWithDefaults(...args); } + return Object.assign(withDecorations, requestWithDefaults); +} - // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; - // Time since last uuid creation (in msecs) - var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; +/***/ }), - // Per 4.2.1.2, Bump clockseq on clock regression - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } +/***/ 21366: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } +"use strict"; - // Per 4.2.1.2 Throw error if too many uuids are requested - if (nsecs >= 10000) { - throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(58932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - - // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - msecs += 12219292800000; - - // `time_low` - var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; - - // `time_mid` - var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; - // `time_high_and_version` - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - b[i++] = tmh >>> 16 & 0xff; +/***/ }), - // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - b[i++] = clockseq >>> 8 | 0x80; +/***/ 77434: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // `clock_seq_low` - b[i++] = clockseq & 0xff; +"use strict"; - // `node` - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - return buf ? buf : bytesToUuid(b); -} +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(62777); +var import_universal_user_agent = __nccwpck_require__(45030); -module.exports = v1; +// pkg/dist-src/version.js +var VERSION = "8.4.0"; +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} -/***/ }), +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(21366); -/***/ 8373: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} -var rng = __nccwpck_require__(34069); -var bytesToUuid = __nccwpck_require__(65694); +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? 
void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; +} -function v4(options, buf, offset) { - var i = buf && offset || 0; +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} - if (typeof(options) == 'string') { - buf = options === 'binary' ? 
new Array(16) : null; - options = null; +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } - options = options || {}; +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); - var rnds = options.random || (options.rng || rng)(); - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; +/***/ }), - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } +/***/ 3771: +/***/ ((module, exports) => { - return buf || bytesToUuid(rnds); +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} } -module.exports = v4; +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 -/***/ }), +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 -/***/ 87351: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 -"use strict"; +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(22037)); -const utils_1 = __nccwpck_require__(5278); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); +function tok (n) { + t[n] = R++ } -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value } -//# sourceMappingURL=command.js.map -/***/ }), +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. -/***/ 42186: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. 
-"use strict"; +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(87351); -const file_command_1 = __nccwpck_require__(717); -const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(22037)); -const path = __importStar(__nccwpck_require__(71017)); -const oidc_utils_1 = __nccwpck_require__(98041); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); - } - command_1.issueCommand('set-env', { name }, convertedVal); -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueFileCommand('PATH', inputPath); - } - else { - command_1.issueCommand('add-path', {}, inputPath); - } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Gets the values of an multiline input. Each value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string[] - * - */ -function getMultilineInput(name, options) { - const inputs = getInput(name, options) - .split('\n') - .filter(x => x !== ''); - if (options && options.trimWhitespace === false) { - return inputs; - } - return inputs.map(input => input.trim()); -} -exports.getMultilineInput = getMultilineInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - const filePath = process.env['GITHUB_OUTPUT'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); - } - process.stdout.write(os.EOL); - command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function error(message, properties = {}) { - command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds a warning issue - * @param message warning issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function warning(message, properties = {}) { - command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Adds a notice issue - * @param message notice issue message. Errors will be converted to string via toString() - * @param properties optional properties to add to the annotation. - */ -function notice(message, properties = {}) { - command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); -} -exports.notice = notice; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. 
- */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - const filePath = process.env['GITHUB_STATE'] || ''; - if (filePath) { - return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); - } - command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. - * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -function getIDToken(aud) { - return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); -} -exports.getIDToken = getIDToken; -/** - * Summary exports - */ -var summary_1 = __nccwpck_require__(81327); -Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); -/** - * @deprecated use core.summary - */ -var summary_2 = __nccwpck_require__(81327); -Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); -/** - * Path exports - */ -var path_utils_1 = __nccwpck_require__(2981); -Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); -Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); -Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); -//# sourceMappingURL=core.js.map +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. -/***/ }), +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' -/***/ 717: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +// ## Main Version +// Three dot-separated numeric identifiers. -"use strict"; +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' -// For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(57147)); -const os = __importStar(__nccwpck_require__(22037)); -const uuid_1 = __nccwpck_require__(78974); -const utils_1 = __nccwpck_require__(5278); -function issueFileCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueFileCommand = issueFileCommand; -function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - const convertedValue = utils_1.toCommandValue(value); - // These should realistically never happen, but just in case someone finds a - // way to exploit uuid generation let's not allow keys or values that contain - // the delimiter. - if (key.includes(delimiter)) { - throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); - } - if (convertedValue.includes(delimiter)) { - throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); - } - return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; -} -exports.prepareKeyValueMessage = prepareKeyValueMessage; -//# sourceMappingURL=file-command.js.map +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' -/***/ }), +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. -/***/ 98041: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -"use strict"; +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(96255); -const auth_1 = __nccwpck_require__(35526); -const core_1 = __nccwpck_require__(42186); -class OidcClient { - static createHttpClient(allowRetry = true, maxRetry = 10) { - const requestOptions = { - allowRetries: allowRetry, - maxRetries: maxRetry - }; - return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); - } - static getRequestToken() { - const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); - } - return token; - } - static getIDTokenUrl() { - const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); - } - return runtimeUrl; - } - static getCall(id_token_url) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const httpclient = OidcClient.createHttpClient(); - const res = yield httpclient - .getJson(id_token_url) - .catch(error => { - throw new Error(`Failed to get ID Token. \n - Error Code : ${error.statusCode}\n - Error Message: ${error.message}`); - }); - const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; - if (!id_token) { - throw new Error('Response json body do not have ID Token field'); - } - return id_token; - }); - } - static getIDToken(audience) { - return __awaiter(this, void 0, void 0, function* () { - try { - // New ID Token is requested from action service - let id_token_url = OidcClient.getIDTokenUrl(); - if (audience) { - const encodedAudience = encodeURIComponent(audience); - id_token_url = `${id_token_url}&audience=${encodedAudience}`; - } - core_1.debug(`ID token url is ${id_token_url}`); - const id_token = yield OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); - return id_token; - } - catch (error) { - throw new Error(`Error message: ${error.message}`); - } - }); - } -} -exports.OidcClient = OidcClient; -//# sourceMappingURL=oidc-utils.js.map +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. -/***/ }), +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' -/***/ 2981: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' -"use strict"; +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(71017)); -/** - * toPosixPath converts the given path to the posix form. On Windows, \\ will be - * replaced with /. - * - * @param pth. Path to transform. - * @return string Posix path. - */ -function toPosixPath(pth) { - return pth.replace(/[\\]/g, '/'); -} -exports.toPosixPath = toPosixPath; -/** - * toWin32Path converts the given path to the win32 form. On Linux, / will be - * replaced with \\. - * - * @param pth. Path to transform. - * @return string Win32 path. - */ -function toWin32Path(pth) { - return pth.replace(/[/]/g, '\\'); -} -exports.toWin32Path = toWin32Path; -/** - * toPlatformPath converts the given path to a platform-specific path. It does - * this by replacing instances of / and \ with the platform-specific path - * separator. - * - * @param pth The path to platformize. - * @return string The platform-specific path. - */ -function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path.sep); -} -exports.toPlatformPath = toPlatformPath; -//# sourceMappingURL=path-utils.js.map +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' -/***/ }), +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. -/***/ 81327: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' -"use strict"; +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(22037); -const fs_1 = __nccwpck_require__(57147); -const { access, appendFile, writeFile } = fs_1.promises; -exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; -exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; -class Summary { - constructor() { - this._buffer = ''; - } - /** - * Finds the summary file path from the environment, rejects if env var is not found or file does not exist - * Also checks r/w permissions. - * - * @returns step summary file path - */ - filePath() { - return __awaiter(this, void 0, void 0, function* () { - if (this._filePath) { - return this._filePath; - } - const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; - if (!pathFromEnv) { - throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); - } - try { - yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); - } - catch (_a) { - throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); - } - this._filePath = pathFromEnv; - return this._filePath; - }); - } - /** - * Wraps content in an HTML tag, adding any HTML attributes - * - * @param {string} tag HTML tag to wrap - * @param {string | null} content content within the tag - * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add - * - * @returns {string} content wrapped in HTML element - */ - wrap(tag, content, attrs = {}) { - const htmlAttrs = Object.entries(attrs) - .map(([key, value]) => ` ${key}="${value}"`) - .join(''); - if (!content) { - return `<${tag}${htmlAttrs}>`; - } - return `<${tag}${htmlAttrs}>${content}`; - } - /** - * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. - * - * @param {SummaryWriteOptions} [options] (optional) options for write operation - * - * @returns {Promise
<Summary>
} summary instance - */ - write(options) { - return __awaiter(this, void 0, void 0, function* () { - const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); - const filePath = yield this.filePath(); - const writeFunc = overwrite ? writeFile : appendFile; - yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); - return this.emptyBuffer(); - }); - } - /** - * Clears the summary buffer and wipes the summary file - * - * @returns {Summary} summary instance - */ - clear() { - return __awaiter(this, void 0, void 0, function* () { - return this.emptyBuffer().write({ overwrite: true }); - }); - } - /** - * Returns the current summary buffer as a string - * - * @returns {string} string of summary buffer - */ - stringify() { - return this._buffer; - } - /** - * If the summary buffer is empty - * - * @returns {boolen} true if the buffer is empty - */ - isEmptyBuffer() { - return this._buffer.length === 0; - } - /** - * Resets the summary buffer without writing to summary file - * - * @returns {Summary} summary instance - */ - emptyBuffer() { - this._buffer = ''; - return this; - } - /** - * Adds raw text to the summary buffer - * - * @param {string} text content to add - * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) - * - * @returns {Summary} summary instance - */ - addRaw(text, addEOL = false) { - this._buffer += text; - return addEOL ? this.addEOL() : this; - } - /** - * Adds the operating system-specific end-of-line marker to the buffer - * - * @returns {Summary} summary instance - */ - addEOL() { - return this.addRaw(os_1.EOL); - } - /** - * Adds an HTML codeblock to the summary buffer - * - * @param {string} code content to render within fenced code block - * @param {string} lang (optional) language to syntax highlight code - * - * @returns {Summary} summary instance - */ - addCodeBlock(code, lang) { - const attrs = Object.assign({}, (lang && { lang })); - const element = this.wrap('pre', this.wrap('code', code), attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML list to the summary buffer - * - * @param {string[]} items list of items to render - * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) - * - * @returns {Summary} summary instance - */ - addList(items, ordered = false) { - const tag = ordered ? 'ol' : 'ul'; - const listItems = items.map(item => this.wrap('li', item)).join(''); - const element = this.wrap(tag, listItems); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML table to the summary buffer - * - * @param {SummaryTableCell[]} rows table rows - * - * @returns {Summary} summary instance - */ - addTable(rows) { - const tableBody = rows - .map(row => { - const cells = row - .map(cell => { - if (typeof cell === 'string') { - return this.wrap('td', cell); - } - const { header, data, colspan, rowspan } = cell; - const tag = header ? 
'th' : 'td'; - const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); - return this.wrap(tag, data, attrs); - }) - .join(''); - return this.wrap('tr', cells); - }) - .join(''); - const element = this.wrap('table', tableBody); - return this.addRaw(element).addEOL(); - } - /** - * Adds a collapsable HTML details element to the summary buffer - * - * @param {string} label text for the closed state - * @param {string} content collapsable content - * - * @returns {Summary} summary instance - */ - addDetails(label, content) { - const element = this.wrap('details', this.wrap('summary', label) + content); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML image tag to the summary buffer - * - * @param {string} src path to the image you to embed - * @param {string} alt text description of the image - * @param {SummaryImageOptions} options (optional) addition image attributes - * - * @returns {Summary} summary instance - */ - addImage(src, alt, options) { - const { width, height } = options || {}; - const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); - const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML section heading element - * - * @param {string} text heading text - * @param {number | string} [level=1] (optional) the heading level, default: 1 - * - * @returns {Summary} summary instance - */ - addHeading(text, level) { - const tag = `h${level}`; - const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) - ? tag - : 'h1'; - const element = this.wrap(allowedTag, text); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML thematic break (
<hr>) to the summary buffer - * - * @returns {Summary} summary instance - */ - addSeparator() { - const element = this.wrap('hr', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML line break (<br>
) to the summary buffer - * - * @returns {Summary} summary instance - */ - addBreak() { - const element = this.wrap('br', null); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML blockquote to the summary buffer - * - * @param {string} text quote text - * @param {string} cite (optional) citation url - * - * @returns {Summary} summary instance - */ - addQuote(text, cite) { - const attrs = Object.assign({}, (cite && { cite })); - const element = this.wrap('blockquote', text, attrs); - return this.addRaw(element).addEOL(); - } - /** - * Adds an HTML anchor tag to the summary buffer - * - * @param {string} text link text/content - * @param {string} href hyperlink - * - * @returns {Summary} summary instance - */ - addLink(text, href) { - const element = this.wrap('a', text, { href }); - return this.addRaw(element).addEOL(); - } -} -const _summary = new Summary(); -/** - * @deprecated use `core.summary` - */ -exports.markdownSummary = _summary; -exports.summary = _summary; -//# sourceMappingURL=summary.js.map - -/***/ }), - -/***/ 5278: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandProperties = exports.toCommandValue = void 0; -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -/** - * - * @param annotationProperties - * @returns The command properties to send with the actual annotation command - * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 - */ -function toCommandProperties(annotationProperties) { - if (!Object.keys(annotationProperties).length) { - return {}; - } - return { - title: annotationProperties.title, - file: annotationProperties.file, - line: annotationProperties.startLine, - endLine: annotationProperties.endLine, - col: annotationProperties.startColumn, - endColumn: annotationProperties.endColumn - }; -} -exports.toCommandProperties = toCommandProperties; -//# sourceMappingURL=utils.js.map +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' -/***/ }), +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' -/***/ 78974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' 
-"use strict"; +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' -var _v = _interopRequireDefault(__nccwpck_require__(81595)); +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' -var _v2 = _interopRequireDefault(__nccwpck_require__(26993)); +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' -var _v3 = _interopRequireDefault(__nccwpck_require__(51472)); +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' -var _v4 = _interopRequireDefault(__nccwpck_require__(16217)); +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') -var _nil = _interopRequireDefault(__nccwpck_require__(32381)); +// Tilde ranges. 
+// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' -var _version = _interopRequireDefault(__nccwpck_require__(40427)); +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' -var _parse = _interopRequireDefault(__nccwpck_require__(26385)); +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' -/***/ }), +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' -/***/ 5842: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' -"use strict"; +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + // Replace all greedy whitespace to prevent regex dos issues. 
These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) } - - return _crypto.default.createHash('md5').update(bytes).digest(); } -var _default = md5; -exports["default"] = _default; +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -/***/ }), + if (version instanceof SemVer) { + return version + } -/***/ 32381: -/***/ ((__unused_webpack_module, exports) => { + if (typeof version !== 'string') { + return null + } -"use strict"; + if (version.length > MAX_LENGTH) { + return null + } + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} -/***/ }), +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} -/***/ 26385: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} -"use strict"; +exports.SemVer = SemVer +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) } - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + this.raw = version - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ 
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } -var _default = parse; -exports["default"] = _default; + this.build = m[5] ? m[5].split('.') : [] + this.format() +} -/***/ }), +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} -/***/ 86230: -/***/ ((__unused_webpack_module, exports) => { +SemVer.prototype.toString = function () { + return this.version +} -"use strict"; +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + return this.compareMain(other) || this.comparePre(other) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/***/ }), + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} -/***/ 9784: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -"use strict"; + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, 
this.options) + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break -let poolPtr = rnds8Pool.length; + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } - poolPtr = 0; + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null } +} - return rnds8Pool.slice(poolPtr, poolPtr += 16); +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } } -/***/ }), +exports.compareIdentifiers = compareIdentifiers -/***/ 38844: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) -"use strict"; + if (anum && bnum) { + a = +a + b = +b + } + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} - return _crypto.default.createHash('sha1').update(bytes).digest(); +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) } -var _default = sha1; -exports["default"] = _default; +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} -/***/ }), +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} -/***/ 61458: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} -"use strict"; +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 } -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. 
If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b - return uuid; -} + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -var _default = stringify; -exports["default"] = _default; + case '': + case '=': + case '==': + return eq(a, b, loose) -/***/ }), + case '!=': + return neq(a, b, loose) -/***/ 81595: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + case '>': + return gt(a, b, loose) -"use strict"; + case '>=': + return gte(a, b, loose) + case '<': + return lt(a, b, loose) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + case '<=': + return lte(a, b, loose) -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + default: + throw new TypeError('Invalid operator: ' + op) + } +} -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } -let _clockseq; // Previous uuid creation time + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + debug('comp', this) +} -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} +Comparator.prototype.toString = function () { + return this.value +} - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + if (this.semver === ANY || version === ANY) { + return true + } - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval + return cmp(version, this.operator, this.semver, this.options) +} +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + var rangeTmp - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version 
=== comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) - msecs += 12219292800000; // `time_low` + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + if (range instanceof Comparator) { + return new Range(range.value, options) + } - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + if (!(this instanceof Range)) { + return new Range(range, options) + } - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease - b[i++] = clockseq & 0xff; // `node` + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) } - return buf || (0, _stringify.default)(b); + this.format() } -var _default = v1; -exports["default"] = _default; +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} -/***/ }), +Range.prototype.toString = function () { + return this.range +} -/***/ 26993: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) -"use strict"; + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + // normalize spaces + range = range.split(/\s+/).join(' ') -var _v = _interopRequireDefault(__nccwpck_require__(65920)); + // At this point, the range is completely trimmed and + // ready to be split into comparators. -var _md = _interopRequireDefault(__nccwpck_require__(5842)); + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return set +} -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } -/***/ }), + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} -/***/ 65920: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() -"use strict"; + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + testComparator = remainingComparators.pop() + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; + return result +} -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} -var _parse = _interopRequireDefault(__nccwpck_require__(26385)); +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} - const bytes = []; +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } - return bytes; + debug('tilde return', ret) + return ret + }) } -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' 
+ p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } } - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; + debug('caret return', ret) + return ret + }) +} - if (buf) { - offset = offset || 0; +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp - return buf; + if (gtlt === '=' && anyX) { + gtlt = '' } - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + debug('xRange return', ret) - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; + return ret + }) } -/***/ }), +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! 
+ return comp.trim().replace(safeRe[t.STAR], '') +} -/***/ 51472: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } -"use strict"; + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + return (from + ' ' + to).trim() +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } -var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } -var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } -function v4(options, buf, offset) { - options = options || {}; + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + return true +} - if (buf) { - offset = offset || 0; +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } } + }) + return max +} - return buf; +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } - - return (0, _stringify.default)(rnds); + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min } -var _default = v4; -exports["default"] = _default; +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) -/***/ }), + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } -/***/ 16217: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } -"use strict"; + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + if (minver && range.test(minver)) { + return minver + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + return null +} -var _v = _interopRequireDefault(__nccwpck_require__(65920)); +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} -var _sha = _interopRequireDefault(__nccwpck_require__(38844)); +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +// Determine if version is greater than all the versions possible in the range. 
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) -/***/ }), + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } -/***/ 92609: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } -"use strict"; + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + var high = null + var low = null -var _regex = _interopRequireDefault(__nccwpck_require__(86230)); + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true } -var _default = validate; -exports["default"] = _default; - -/***/ }), +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} -/***/ 40427: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} -"use strict"; +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + if (typeof version === 'number') { + version = String(version) + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (typeof version !== 'string') { + return null + } -var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + options = options || {} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); + if (match === null) { + return null } - return parseInt(uuid.substr(14, 1), 16); + return parse(match[2] + + '.' + (match[3] || '0') + + '.' + (match[4] || '0'), options) } -var _default = version; -exports["default"] = _default; /***/ }), -/***/ 71514: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 94138: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var v1 = __nccwpck_require__(61610); +var v4 = __nccwpck_require__(8373); + +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; + +module.exports = uuid; + + +/***/ }), + +/***/ 65694: +/***/ ((module) => { -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExecOutput = exports.exec = void 0; -const string_decoder_1 = __nccwpck_require__(71576); -const tr = __importStar(__nccwpck_require__(88159)); /** - * Exec a command. - * Output will be streamed to the live console. 
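Two of the pieces stitched together just above, coerce from semver and the uuid index that re-exports v1/v4, in use. A sketch assuming the standalone semver package and a uuid release with the same export shape as the bundled copy:

const semver = require('semver');
const uuid = require('uuid'); // v1/v4 attached to the export, as wired above

// coerce pulls the first usable MAJOR.MINOR.PATCH out of a messy string: '42.6.7'
console.log(semver.coerce('v42.6.7.9.3-alpha').version);

// v4: random UUID with the version nibble forced to 4 and the variant bits to 10xx
console.log(uuid.v4());

// v1: time-based UUID built from a 100ns Gregorian-epoch timestamp, clock sequence and node id
console.log(uuid.v1());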
- * Returns promise with return code - * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. See ExecOptions - * @returns Promise exit code + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ -function exec(commandLine, args, options) { - return __awaiter(this, void 0, void 0, function* () { - const commandArgs = tr.argStringToArray(commandLine); - if (commandArgs.length === 0) { - throw new Error(`Parameter 'commandLine' cannot be null or empty.`); - } - // Path to tool to execute should be first arg - const toolPath = commandArgs[0]; - args = commandArgs.slice(1).concat(args || []); - const runner = new tr.ToolRunner(toolPath, args, options); - return runner.exec(); - }); +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); } -exports.exec = exec; + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 34069: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(6113); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 61610: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(34069); +var bytesToUuid = __nccwpck_require__(65694); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). 
JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; + + +/***/ }), + +/***/ 8373: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(34069); +var bytesToUuid = __nccwpck_require__(65694); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 87351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const utils_1 = __nccwpck_require__(5278); /** - * Exec a command and get the output. - * Output will be streamed to the live console. - * Returns promise with the exit code and collected stdout and stderr + * Commands * - * @param commandLine command to execute (can include additional args). Must be correctly escaped. - * @param args optional arguments for tool. Escaping is handled by the lib. - * @param options optional exec options. See ExecOptions - * @returns Promise exit code, stdout, and stderr + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value */ -function getExecOutput(commandLine, args, options) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - let stdout = ''; - let stderr = ''; - //Using string decoder covers the case where a mult-byte character is split - const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); - const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); - const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; - const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; - const stdErrListener = (data) => { - stderr += stderrDecoder.write(data); - if (originalStdErrListener) { - originalStdErrListener(data); - } - }; - const stdOutListener = (data) => { - stdout += stdoutDecoder.write(data); - if (originalStdoutListener) { - originalStdoutListener(data); +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } } - }; - const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); - const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); - //flush any remaining characters - stdout += stdoutDecoder.end(); - stderr += stderrDecoder.end(); - return { - exitCode, - stdout, - stderr - }; - }); + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } } -exports.getExecOutput = getExecOutput; -//# sourceMappingURL=exec.js.map +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map /***/ }), -/***/ 88159: +/***/ 42186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -6879,663 +9283,321 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.argStringToArray = exports.ToolRunner = void 0; +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(87351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(22037)); -const events = __importStar(__nccwpck_require__(82361)); -const child = __importStar(__nccwpck_require__(32081)); const path = __importStar(__nccwpck_require__(71017)); -const io = __importStar(__nccwpck_require__(47351)); -const ioUtil = __importStar(__nccwpck_require__(81962)); -const timers_1 = __nccwpck_require__(39512); -/* eslint-disable @typescript-eslint/unbound-method */ -const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. +const oidc_utils_1 = __nccwpck_require__(98041); +/** + * The code to exit an action */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; - } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } - } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows (regular) - else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; - } - } - } - else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); - } - return s; - } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. Failed with error ${err}`); - return ''; - } +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; - } - } - return this.toolPath; + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? 
a - : this._windowsQuoteCmdArg(a); - } - argline += '"'; - return [argline]; - } - } - return this.args; + else { + command_1.issueCommand('add-path', {}, inputPath); } - _endsWith(str, end) { - return str.endsWith(end); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); - } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. - // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; - } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; - } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. - // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. 
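The ::name key=value::message workflow-command format and its percent-style escaping (escapeData/escapeProperty in the command.js module added above) are straightforward to reproduce. A small stand-alone sketch of what issueCommand ends up writing to stdout; it mirrors, rather than calls, the library code:

const os = require('os');

// Same escaping rules as escapeData/escapeProperty above.
const escapeData = s => String(s).replace(/%/g, '%25').replace(/\r/g, '%0D').replace(/\n/g, '%0A');
const escapeProperty = s => escapeData(s).replace(/:/g, '%3A').replace(/,/g, '%2C');

function issueCommand(command, properties, message) {
  const props = Object.entries(properties)
    .map(([key, value]) => `${key}=${escapeProperty(value)}`)
    .join(',');
  process.stdout.write(`::${command}${props ? ' ' + props : ''}::${escapeData(message)}` + os.EOL);
}

// Prints: ::warning file=src/main.ts,line=10::Multi-line%0Amessage
issueCommand('warning', {file: 'src/main.ts', line: 10}, 'Multi-line\nmessage');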
- // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. - if (!arg) { - // Need double quotation for empty argument - return '""'; - } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; - } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. 
- return `"${arg}"`; - } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; - } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. 
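A compact sketch of the .cmd quoting rule spelled out in the ToolRunner comments above: wrap the argument in quotes, double embedded quotes, and double backslashes that precede a quote. This is an illustration of the documented rule, not the library's own function:

// Quote an argument for cmd.exe following the rule described above.
function cmdQuote(arg) {
  if (!arg) return '""'; // empty args still need quotes
  if (!/[ \t&()\[\]{}^=;!'+,`~|<>"]/.test(arg)) return arg; // nothing special, leave as-is
  let out = '"';
  let quoteHit = true; // walk the string in reverse, as the original does
  for (let i = arg.length; i > 0; i--) {
    out += arg[i - 1];
    if (quoteHit && arg[i - 1] === '\\') {
      out += '\\'; // double a backslash that precedes a quote
    } else if (arg[i - 1] === '"') {
      quoteHit = true;
      out += '"'; // double the quote itself
    } else {
      quoteHit = false;
    }
  }
  out += '"';
  return out.split('').reverse().join('');
}

console.log(cmdQuote('hello world\\')); // prints "hello world\\"
console.log(cmdQuote('hello"world'));   // prints "hello""world"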
See ExecOptions - * @returns number - */ - exec() { - return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { - return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); - } - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - let stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - let errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? 
optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); - } - errbuffer = this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error('child process missing stdin'); - } - cp.stdin.end(this.options.input); - } - })); - }); + if (options && options.trimWhitespace === false) { + return val; } + return val.trim(); } -exports.ToolRunner = ToolRunner; +exports.getInput = getInput; /** - * Convert an arg string to an array of args. Handles escaping + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] * - * @param argString string of arguments - * @returns string[] array of arguments */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. 
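getInput and getMultilineInput above read INPUT_* environment variables that the runner injects (spaces become underscores and the name is upper-cased). A usage sketch with @actions/core; the input names and values are made up for illustration:

const core = require('@actions/core');

// The runner would expose an input named node-version as INPUT_NODE-VERSION.
process.env['INPUT_NODE-VERSION'] = '  20.x  ';
process.env['INPUT_CACHE-DEPENDENCY-PATH'] = 'a/package-lock.json\nb/package-lock.json';

console.log(core.getInput('node-version'));                   // '20.x' (trimmed by default)
console.log(core.getMultilineInput('cache-dependency-path')); // one trimmed entry per line
core.getInput('node-version', {required: true});              // throws only when the value is empty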
- if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; - } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } - else { - append(c); - } - continue; - } - if (c === '\\' && escaped) { - append(c); - continue; - } - if (c === '\\' && inQuotes) { - escaped = true; - continue; - } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; } - return args; + return inputs.map(input => input.trim()); } -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit('debug', message); - } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); - } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
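The ToolRunner/argStringToArray code shown above is what backs @actions/exec: the command line is split on unquoted spaces, quoted sections stay intact, and any extra args are appended after it. A usage sketch; the tool and arguments are illustrative:

const exec = require('@actions/exec');

async function run() {
  // Parsed as tool 'git' with args ['log', '--format=%H %s', '-n', '3'];
  // the quoted section survives as a single argument.
  await exec.exec('git log "--format=%H %s"', ['-n', '3']);

  // getExecOutput also buffers stdout/stderr and returns them with the exit code.
  const {exitCode, stdout} = await exec.getExecOutput('node --version');
  console.log(exitCode, stdout.trim());
}

run().catch(err => {
  console.error(err);
  process.exitCode = 1;
});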
+ * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } -//# sourceMappingURL=toolrunner.js.map - -/***/ }), - -/***/ 74087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(57147); -const os_1 = __nccwpck_require__(22037); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
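setOutput (and exportVariable earlier) prefer the GITHUB_OUTPUT / GITHUB_ENV file commands when those variables are set, falling back to ::set-output / ::set-env otherwise. A sketch of the file-based path using @actions/core; the scratch file stands in for the file the runner normally provides, and the delimiter shape matches prepareKeyValueMessage further down:

const fs = require('fs');
const os = require('os');
const path = require('path');
const core = require('@actions/core');

// Point GITHUB_OUTPUT at a scratch file, as the runner would.
const outFile = path.join(os.tmpdir(), 'gh_output_demo');
fs.writeFileSync(outFile, '');
process.env['GITHUB_OUTPUT'] = outFile;

core.setOutput('node-version', '20.11.1');

// The file now contains a delimited key/value block along the lines of:
//   node-version<<ghadelimiter_<uuid>
//   20.11.1
//   ghadelimiter_<uuid>
console.log(fs.readFileSync(outFile, 'utf8'));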
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; + finally { + endGroup(); } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
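The logging helpers added above translate directly into workflow commands (::debug::, ::warning::, ::notice::, ::group:: and so on). A short usage sketch with @actions/core; the messages and annotation properties are illustrative:

const core = require('@actions/core');

async function main() {
  core.debug('shown only when step debug logging is enabled'); // ::debug::...
  core.warning('lockfile missing', {file: 'package.json'});    // annotation with properties
  core.notice('using cached toolchain');

  // group() folds everything logged by the callback between ::group:: and ::endgroup::.
  const version = await core.group('resolve node version', async () => {
    core.info('querying version manifest...');
    return '20.11.1';
  });
  core.info(`resolved ${version}`);
}

main().catch(err => core.setFailed(err instanceof Error ? err.message : String(err)));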
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } -exports.Context = Context; -//# sourceMappingURL=context.js.map +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(81327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(81327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map /***/ }), -/***/ 95438: +/***/ 717: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +// For internal use, subject to change. var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); @@ -7556,76 +9618,130 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(74087)); -const utils_1 = __nccwpck_require__(73030); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options, ...additionalPlugins) { - const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(57147)); +const os = __importStar(__nccwpck_require__(22037)); +const uuid_1 = __nccwpck_require__(78974); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); } -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map /***/ }), -/***/ 47914: +/***/ 98041: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(96255)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(96255); +const auth_1 = __nccwpck_require__(35526); +const core_1 = __nccwpck_require__(42186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map /***/ }), -/***/ 73030: +/***/ 2981: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7650,43 +9766,47 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(74087)); -const Utils = __importStar(__nccwpck_require__(47914)); -// octokit + plugins -const core_1 = __nccwpck_require__(76762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(83044); -const plugin_paginate_rest_1 = __nccwpck_require__(64193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -exports.defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(71017)); /** - * Convience function to correctly format Octokit Options to pass into the constructor. + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set + * @param pth. Path to transform. + * @return string Posix path. */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); } -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. 
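The OidcClient above is what core.getIDToken() delegates to: it exchanges the runner-provided ACTIONS_ID_TOKEN_REQUEST_URL / ACTIONS_ID_TOKEN_REQUEST_TOKEN pair for a JWT and registers the token as a secret. A usage sketch; the audience value is illustrative, and the workflow job needs the id-token: write permission:

const core = require('@actions/core');

async function main() {
  // Fails outside a runner, where the OIDC request URL and token are not injected.
  const jwt = await core.getIDToken('sts.amazonaws.com'); // audience is optional
  core.info(`received an OIDC token of length ${jwt.length}`);
}

main().catch(err => core.setFailed(err instanceof Error ? err.message : String(err)));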
+ */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map /***/ }), -/***/ 28090: +/***/ 81327: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -7701,1284 +9821,982 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = exports.create = void 0; -const internal_globber_1 = __nccwpck_require__(28298); -const internal_hash_files_1 = __nccwpck_require__(2448); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); -} -exports.create = create; -/** - * Computes the sha256 hash of a glob - * - * @param patterns Patterns separated by newlines - * @param currentWorkspace Workspace used when matching files - * @param options Glob options - * @param verbose Enables verbose logging - */ -function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { - return __awaiter(this, void 0, void 0, function* () { - let followSymbolicLinks = true; - if (options && typeof options.followSymbolicLinks === 'boolean') { - followSymbolicLinks = options.followSymbolicLinks; - } - const globber = yield create(patterns, { followSymbolicLinks }); - return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose); - }); -} -exports.hashFiles = hashFiles; -//# sourceMappingURL=glob.js.map - -/***/ }), - -/***/ 51026: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOptions = void 0; -const core = __importStar(__nccwpck_require__(42186)); -/** - * Returns a copy with defaults filled in. 
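toPosixPath, toWin32Path and toPlatformPath above are plain separator rewrites. A quick sketch of what they return; the example paths are arbitrary:

const core = require('@actions/core');

console.log(core.toPosixPath('dist\\setup\\index.js')); // dist/setup/index.js
console.log(core.toWin32Path('dist/setup/index.js'));   // dist\setup\index.js

// toPlatformPath swaps both separators for path.sep, so the result depends on the host OS.
console.log(core.toPlatformPath('dist/cache-save\\index.js'));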
- */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - matchDirectories: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.matchDirectories === 'boolean') { - result.matchDirectories = copy.matchDirectories; - core.debug(`matchDirectories '${result.matchDirectories}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - } - return result; -} -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map - -/***/ }), - -/***/ 28298: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? 
(this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DefaultGlobber = void 0; -const core = __importStar(__nccwpck_require__(42186)); -const fs = __importStar(__nccwpck_require__(57147)); -const globOptionsHelper = __importStar(__nccwpck_require__(51026)); -const path = __importStar(__nccwpck_require__(71017)); -const patternHelper = __importStar(__nccwpck_require__(29005)); -const internal_match_kind_1 = __nccwpck_require__(81063); -const internal_pattern_1 = __nccwpck_require__(64536); -const internal_search_state_1 = __nccwpck_require__(89117); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(22037); +const fs_1 = __nccwpck_require__(57147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; } - glob() { - var e_1, _a; + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } + if (this._filePath) { + return this._filePath; } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? 
- const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); - } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { - yield yield __await(item.path); - } - // Descend? - else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); - } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); } + this._filePath = pathFromEnv; + return this._filePath; }); } /** - * Constructs a DefaultGlobber + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element */ - static create(patterns, options) { + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. 
+ * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); }); } - static stat(item, options, traversalChain) { + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); - } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); - } - throw err; - } - } - else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); - } - return stats; + return this.emptyBuffer().write({ overwrite: true }); }); } -} -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map - -/***/ }), - -/***/ 2448: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.hashFiles = void 0; -const crypto = __importStar(__nccwpck_require__(6113)); -const core = __importStar(__nccwpck_require__(42186)); -const fs = __importStar(__nccwpck_require__(57147)); -const stream = __importStar(__nccwpck_require__(12781)); -const util = __importStar(__nccwpck_require__(73837)); -const path = __importStar(__nccwpck_require__(71017)); -function hashFiles(globber, currentWorkspace, verbose = false) { - var e_1, _a; - var _b; - return __awaiter(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core.info : core.debug; - let hasMatch = false; - const githubWorkspace = currentWorkspace - ? currentWorkspace - : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd(); - const result = crypto.createHash('sha256'); - let count = 0; - try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { - writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); - continue; - } - if (fs.statSync(file).isDirectory()) { - writeDelegate(`Skip directory '${file}'.`); - continue; - } - const hash = crypto.createHash('sha256'); - const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs.createReadStream(file), hash); - result.write(hash.digest()); - count++; - if (!hasMatch) { - hasMatch = true; + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); } - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); - } - finally { if (e_1) throw e_1.error; } - } - result.end(); - if (hasMatch) { - writeDelegate(`Found ${count} files to hash.`); - return result.digest('hex'); - } - else { - writeDelegate(`No matches found for glob`); - return ''; - } - }); -} -exports.hashFiles = hashFiles; -//# sourceMappingURL=internal-hash-files.js.map - -/***/ }), - -/***/ 81063: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MatchKind = void 0; -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map - -/***/ }), - -/***/ 1849: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; -const path = __importStar(__nccwpck_require__(71017)); -const assert_1 = __importDefault(__nccwpck_require__(39491)); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
- * - * For example, on Linux/macOS: - * - `/ => /` - * - `/hello => /` - * - * For example, on Windows: - * - `C:\ => C:\` - * - `C:\hello => C:\` - * - `C: => C:` - * - `C:hello => C:` - * - `\ => \` - * - `\hello => \` - * - `\\hello => \\hello` - * - `\\hello\world => \\hello\world` - */ -function dirname(p) { - // Normalize slashes and trim unnecessary trailing slash - p = safeTrimTrailingSeparator(p); - // Windows UNC root, e.g. \\hello or \\hello\world - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; - } - // Get dirname - let result = path.dirname(p); - // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); } - return result; -} -exports.dirname = dirname; -/** - * Roots the path if not already rooted. On Windows, relative roots like `\` - * or `C:` are expanded based on the current working directory. - */ -function ensureAbsoluteRoot(root, itemPath) { - assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - // Already rooted - if (hasAbsoluteRoot(itemPath)) { - return itemPath; + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); } - // Windows - if (IS_WINDOWS) { - // Check for itemPath like C: or C:foo - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - // Drive letter matches cwd? Expand to cwd - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - // Drive only, e.g. C: - if (itemPath.length === 2) { - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}`; - } - // Drive + path, e.g. C:foo - else { - if (!cwd.endsWith('\\')) { - cwd += '\\'; - } - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; - } - } - // Different drive - else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } - // Check for itemPath like \ or \foo - else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. 
Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; - } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); } - assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - // Otherwise ensure root ends with a separator - if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { - // Intentionally empty + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); } - else { - // Append separator - root += path.sep; + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer
+     *
+     * @returns {Summary} summary instance
+     */
+    addSeparator() {
+        const element = this.wrap('hr', null);
+        return this.addRaw(element).addEOL();
     }
-    return root + itemPath;
-}
-exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
-/**
- * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
- * `\\hello\share` and `C:\hello` (and using alternate separator).
- */
-function hasAbsoluteRoot(itemPath) {
-    assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
-    // Normalize separators
-    itemPath = normalizeSeparators(itemPath);
-    // Windows
-    if (IS_WINDOWS) {
-        // E.g. \\hello\share or C:\hello
-        return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
    }
+    /**
+     * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasAbsoluteRoot = hasAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). - */ -function hasRoot(itemPath) { - assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \ or \hello or \\hello - // E.g. C: or C:\hello - return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasRoot = hasRoot; -/** - * Removes redundant slashes and converts `/` to `\` on Windows - */ -function normalizeSeparators(p) { - p = p || ''; - // Windows - if (IS_WINDOWS) { - // Convert slashes on Windows - p = p.replace(/\//g, '\\'); - // Remove redundant slashes - const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello - return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); } - // Remove redundant slashes - return p.replace(/\/\/+/g, '/'); } -exports.normalizeSeparators = normalizeSeparators; +const _summary = new Summary(); /** - * Normalizes the path separators and trims the trailing separator (when safe). - * For example, `/foo/ => /foo` but `/ => /` + * @deprecated use `core.summary` */ -function safeTrimTrailingSeparator(p) { - // Short-circuit if empty - if (!p) { - return ''; - } - // Normalize separators - p = normalizeSeparators(p); - // No trailing slash - if (!p.endsWith(path.sep)) { - return p; - } - // Check '/' on Linux/macOS and '\' on Windows - if (p === path.sep) { - return p; - } - // On Windows check if drive root. E.g. C:\ - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; - } - // Otherwise trim trailing slash - return p.substr(0, p.length - 1); -} -exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; -//# sourceMappingURL=internal-path-helper.js.map +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map /***/ }), -/***/ 96836: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Path = void 0; -const path = __importStar(__nccwpck_require__(71017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(39491)); -const IS_WINDOWS = process.platform === 'win32'; +exports.toCommandProperties = exports.toCommandValue = void 0; /** - * Helper class for parsing paths into segments + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); - } - // Remainder is the root - this.segments.unshift(remaining); - } - } - // Array - else { - // Must not be empty - assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } - } - } +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += 
path.sep; - } - result += this.segments[i]; - } - return result; + else if (typeof input === 'string' || input instanceof String) { + return input; } + return JSON.stringify(input); } -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map /***/ }), -/***/ 29005: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 78974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; + +Object.defineProperty(exports, "__esModule", ({ + value: true })); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.partialMatch = exports.match = exports.getSearchPaths = void 0; -const pathHelper = __importStar(__nccwpck_require__(1849)); -const internal_match_kind_1 = __nccwpck_require__(81063); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. - */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; - } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? 
pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; - } - } - return result; -} -exports.getSearchPaths = getSearchPaths; -/** - * Matches the patterns against the path - */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } - else { - result |= pattern.match(itemPath); - } - } - return result; -} -exports.match = match; -/** - * Checks whether to descend further into the directory - */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); -} -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(81595)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(26993)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(51472)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(16217)); + +var _nil = _interopRequireDefault(__nccwpck_require__(32381)); + +var _version = _interopRequireDefault(__nccwpck_require__(40427)); + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(26385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), -/***/ 64536: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; + +Object.defineProperty(exports, "__esModule", ({ + value: true })); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Pattern = void 0; -const os = __importStar(__nccwpck_require__(22037)); -const path = __importStar(__nccwpck_require__(71017)); -const pathHelper = __importStar(__nccwpck_require__(1849)); -const assert_1 = __importDefault(__nccwpck_require__(39491)); -const minimatch_1 = __nccwpck_require__(83973); -const internal_match_kind_1 = __nccwpck_require__(81063); -const internal_path_1 = __nccwpck_require__(96836); -const IS_WINDOWS = process.platform === 'win32'; -class Pattern { - constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { - /** - * Indicates whether matches should be excluded from the result set - */ - this.negate = false; - // Pattern overload - let pattern; - if (typeof patternOrNegate === 'string') { - pattern = patternOrNegate.trim(); - } - // Segments overload - else { - // Convert to pattern - segments = segments || []; - assert_1.default(segments.length, `Parameter 'segments' must not empty`); - const root = Pattern.getLiteral(segments[0]); - assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - // Negate - while (pattern.startsWith('!')) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); - } - // Normalize slashes and ensures absolute root - pattern = Pattern.fixupPattern(pattern, homedir); - // Segments - this.segments = new internal_path_1.Path(pattern).segments; - // Trailing slash indicates the pattern should only match directories, not regular files - this.trailingSeparator = pathHelper - .normalizeSeparators(pattern) - .endsWith(path.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - // Search path (literal path prior to the first glob segment) - let foundGlob = false; - const searchSegments = this.segments - .map(x => Pattern.getLiteral(x)) - .filter(x => !foundGlob && !(foundGlob = x === '')); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - // Root RegExp (required when determining partial match) - this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); - this.isImplicitPattern = isImplicitPattern; - // Create minimatch - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? 
pattern.replace(/\\/g, '/') : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - // Last segment is globstar? - if (this.segments[this.segments.length - 1] === '**') { - // Normalize slashes - itemPath = pathHelper.normalizeSeparators(itemPath); - // Append a trailing slash. Otherwise Minimatch will not match the directory immediately - // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns - // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. - if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { - // Note, this is safe because the constructor ensures the pattern has an absolute root. - // For example, formats like C: and C:foo on Windows are resolved to an absolute root. - itemPath = `${itemPath}${path.sep}`; - } - } - else { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - } - // Match - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; - } - return internal_match_kind_1.MatchKind.None; - } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // matchOne does not handle root path correctly - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); - } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); - } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS - .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment - .replace(/\?/g, '[?]') // escape '?' - .replace(/\*/g, '[*]'); // escape '*' - } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern, homedir) { - // Empty - assert_1.default(pattern, 'pattern cannot be empty'); - // Must not contain `.` segment, unless first segment - // Must not contain `..` segment - const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); - assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r - assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - // Normalize slashes - pattern = pathHelper.normalizeSeparators(pattern); - // Replace leading `.` segment - if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { - pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); - } - // Replace leading `~` segment - else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { - homedir = homedir || os.homedir(); - assert_1.default(homedir, 'Unable to determine HOME directory'); - assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = Pattern.globEscape(homedir) + pattern.substr(1); - } - // Replace relative drive root, e.g. 
pattern is C: or C:foo - else if (IS_WINDOWS && - (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(2); - } - // Replace relative root, e.g. pattern is \ or \foo - else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); - if (!root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(1); - } - // Otherwise ensure absolute root - else { - pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); - } - return pathHelper.normalizeSeparators(pattern); - } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. - */ - static getLiteral(segment) { - let literal = ''; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - // Escape - if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } - // Wildcard - else if (c === '*' || c === '?') { - return ''; - } - // Character set - else if (c === '[' && i + 1 < segment.length) { - let set = ''; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - // Escape - if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { - set += segment[++i2]; - continue; - } - // Closed - else if (c2 === ']') { - closed = i2; - break; - } - // Otherwise - else { - set += c2; - } - } - // Closed? - if (closed >= 0) { - // Cannot convert - if (set.length > 1) { - return ''; - } - // Convert to literal - if (set) { - literal += set; - i = closed; - continue; - } - } - // Otherwise fall thru - } - // Append - literal += c; - } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); - } +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); } -exports.Pattern = Pattern; -//# sourceMappingURL=internal-pattern.js.map + +var _default = md5; +exports["default"] = _default; /***/ }), -/***/ 89117: +/***/ 32381: /***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SearchState = void 0; -class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } -} -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; /***/ }), -/***/ 35526: -/***/ (function(__unused_webpack_module, exports) { +/***/ 26385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - 
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map -/***/ }), +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; -/***/ 96255: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); -"use strict"; +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -/* eslint-disable @typescript-eslint/no-explicit-any */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 86230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 38844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 61458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 81595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } - Object.defineProperty(o, k2, desc); + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 26993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(65920)); + +var _md = _interopRequireDefault(__nccwpck_require__(5842)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 65920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(26385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 51472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(61458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 16217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(65920)); + +var _sha = _interopRequireDefault(__nccwpck_require__(38844)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 92609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(86230)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 40427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(92609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 71514: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -8991,7 +10809,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -9005,716 +10823,769 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(13685)); -const https = __importStar(__nccwpck_require__(95687)); -const pm = __importStar(__nccwpck_require__(19835)); -const tunnel = __importStar(__nccwpck_require__(74294)); -const undici_1 = __nccwpck_require__(41773); -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - 
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers || (exports.Headers = Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(71576); +const tr = __importStar(__nccwpck_require__(88159)); /** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code */ -function getProxyUrl(serverUrl) { - const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); } -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? 
void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); } -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - })); - }); +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 88159: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const events = __importStar(__nccwpck_require__(82361)); +const child = __importStar(__nccwpck_require__(32081)); +const path = __importStar(__nccwpck_require__(71017)); +const io = __importStar(__nccwpck_require__(47351)); +const ioUtil = __importStar(__nccwpck_require__(81962)); +const timers_1 = __nccwpck_require__(39512); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; } - readBodyBuffer() { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - const chunks = []; - this.message.on('data', (chunk) => { - chunks.push(chunk); - }); - this.message.on('end', () => { - resolve(Buffer.concat(chunks)); - }); - })); - }); + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - const parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; } } + return cmd; } - options(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - }); - } - get(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - }); - } - del(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - }); - } - post(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - }); - } - patch(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - }); - } - put(requestUrl, data, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - }); - } - head(requestUrl, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - }); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return __awaiter(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream, additionalHeaders); - }); + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise - */ - getJson(requestUrl, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - const res = yield this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; } - postJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; } - putJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + _endsWith(str, end) { + return str.endsWith(end); } - patchJson(requestUrl, obj, additionalHeaders = {}) { - return __awaiter(this, void 0, void 0, function* () { - const data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - const res = yield this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - }); + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - request(verb, requestUrl, data, headers) { - return __awaiter(this, void 0, void 0, function* () { - if (this._disposed) { - throw new Error('Client has already been disposed.'); + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; } - const parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - do { - response = yield this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (const handler of this.handlers) { - if (handler.canHandleAuthentication(response)) { - authenticationHandler = handler; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (response.message.statusCode && - HttpRedirectCodes.includes(response.message.statusCode) && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - const parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol === 'https:' && - parsedUrl.protocol !== parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - yield response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (const header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = yield this.requestRaw(info, data); - redirectsRemaining--; - } - if (!response.message.statusCode || - !HttpResponseRetryCodes.includes(response.message.statusCode)) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - yield response.readBody(); - yield this._performExponentialBackoff(numTries); - } - } while (numTries < maxTries); - return response; - }); + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. 
+ // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+ // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; } - this._disposed = true; + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. + return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; } /** - * Raw request. - * @param info - * @param data + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number */ - requestRaw(info, data) { + exec() { return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - function callbackForResult(err, res) { - if (err) { - reject(err); + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); } - else if (!res) { - // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); } else { - resolve(res); + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); } + cp.stdin.end(this.options.input); } - this.requestRawWithCallback(info, data, callbackForResult); - }); + })); }); } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. 
+ if (escaped && c !== '"') { + arg += '\\'; } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); + else { + append(c); } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); + continue; } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); + if (c === '\\' && escaped) { + append(c); + continue; } - else { - req.end(); + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; } + append(c); } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); + if (arg.length > 0) { + args.push(arg.trim()); } - getAgentDispatcher(serverUrl) { - const parsedUrl = new URL(serverUrl); - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (!useProxy) { - return; + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; } - return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; + CheckComplete() { + if (this.done) { + return; } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } + if (this.processClosed) { + this._setResult(); } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); } - return lowercaseKeys(headers || {}); } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; + _debug(message) { + this.emit('debug', message); } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (!useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if tunneling agent isn't assigned create a new agent - if (!agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? 
new https.Agent(options) : new http.Agent(options); - this._agent = agent; } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; } - return agent; + this.done = true; + this.emit('done', error, this.processExitCode); } - _getProxyAgentDispatcher(parsedUrl, proxyUrl) { - let proxyAgent; - if (this._keepAlive) { - proxyAgent = this._proxyAgentDispatcher; - } - // if agent is already assigned use that agent. - if (proxyAgent) { - return proxyAgent; + static HandleTimeout(state) { + if (state.done) { + return; } - const usingSsl = parsedUrl.protocol === 'https:'; - proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { - token: `${proxyUrl.username}:${proxyUrl.password}` - }))); - this._proxyAgentDispatcher = proxyAgent; - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { - rejectUnauthorized: false - }); + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); } - return proxyAgent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = `Failed request: (${statusCode})`; - } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - })); - }); + state._setResult(); } } -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map +//# sourceMappingURL=toolrunner.js.map /***/ }), -/***/ 19835: -/***/ ((__unused_webpack_module, exports) => { +/***/ 74087: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - try { - return new URL(proxyVar); - } - catch (_a) { - if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) - return new URL(`http://${proxyVar}`); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const reqHost = reqUrl.hostname; - if (isLoopbackAddress(reqHost)) { - return true; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperNoProxyItem === '*' || - upperReqHosts.some(x => x === upperNoProxyItem || - x.endsWith(`.${upperNoProxyItem}`) || - (upperNoProxyItem.startsWith('.') && - x.endsWith(`${upperNoProxyItem}`)))) { - return true; + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } - return false; -} -exports.checkBypass = checkBypass; -function isLoopbackAddress(host) { - const hostLower = host.toLowerCase(); - return (hostLower === 'localhost' || - hostLower.startsWith('127.') || - hostLower.startsWith('[::1]') || - hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } -//# sourceMappingURL=proxy.js.map +exports.Context = Context; +//# sourceMappingURL=context.js.map /***/ }), -/***/ 81962: +/***/ 95438: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -9738,173 +11609,27 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; -const fs = __importStar(__nccwpck_require__(57147)); -const path = __importStar(__nccwpck_require__(71017)); -_a = fs.promises -// export const {open} = 'fs' -, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -// export const {open} = 'fs' -exports.IS_WINDOWS = process.platform === 'win32'; -// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 -exports.UV_FS_O_EXLOCK = 0x10000000; -exports.READONLY = fs.constants.O_RDONLY; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). - */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello - } - return p.startsWith('/'); -} -exports.isRooted = isRooted; +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(74087)); +const utils_1 = __nccwpck_require__(73030); +exports.context = new Context.Context(); /** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. 
+ * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; - }); -} -exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); - } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); -} -// Get the path of cmd.exe in windows -function getCmdPath() { - var _a; - return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? 
_a : `cmd.exe`; +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options)); } -exports.getCmdPath = getCmdPath; -//# sourceMappingURL=io-util.js.map +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map /***/ }), -/***/ 47351: +/***/ 47914: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -9928,290 +11653,148 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; -const assert_1 = __nccwpck_require__(39491); -const path = __importStar(__nccwpck_require__(71017)); -const ioUtil = __importStar(__nccwpck_require__(81962)); -/** - * Copies a file or folder. - * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js - * - * @param source source path - * @param dest destination path - * @param options optional. See CopyOptions. - */ -function cp(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const { force, recursive, copySourceDirectory } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; - // Dest is an existing file, but not forcing - if (destStat && destStat.isFile() && !force) { - return; - } - // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() && copySourceDirectory - ? path.join(dest, path.basename(source)) - : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); - } - else { - yield cpDirRecursive(source, newDest, 0, force); - } - } - else { - if (path.relative(source, newDest) === '') { - // a file cannot be copied to itself - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(96255)); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; } -exports.cp = cp; -/** - * Moves a path. - * - * @param source source path - * @param dest destination path - * @param options optional. See MoveOptions. - */ -function mv(source, dest, options = {}) { - return __awaiter(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - // If dest is directory copy src into dest - dest = path.join(dest, path.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } - else { - throw new Error('Destination already exists'); - } - } - } - yield mkdirP(path.dirname(dest)); - yield ioUtil.rename(source, dest); - }); +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); } -exports.mv = mv; -/** - * Remove a path recursively with force - * - * @param inputPath path to remove - */ -function rmRF(inputPath) { - return __awaiter(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - // Check for invalid characters - // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file - if (/[*"<>|]/.test(inputPath)) { - throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); - } - } - try { - // note if path does not exist, error is silent - yield ioUtil.rm(inputPath, { - force: true, - maxRetries: 3, - recursive: true, - retryDelay: 300 - }); - } - catch (err) { - throw new Error(`File was unable to be removed ${err}`); - } - }); +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } -exports.rmRF = rmRF; +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 73030: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(74087)); +const Utils = __importStar(__nccwpck_require__(47914)); +// octokit + plugins +const core_1 = __nccwpck_require__(76762); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(83044); +const plugin_paginate_rest_1 = __nccwpck_require__(64193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); /** - * Make a directory. Creates the full path with folders in between - * Will throw if it fails + * Convience function to correctly format Octokit Options to pass into the constructor. * - * @param fsPath path to create - * @returns Promise + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function mkdirP(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - yield ioUtil.mkdir(fsPath, { recursive: true }); - }); +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; } -exports.mkdirP = mkdirP; +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 28090: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = exports.create = void 0; +const internal_globber_1 = __nccwpck_require__(28298); +const internal_hash_files_1 = __nccwpck_require__(2448); /** - * Returns path of a tool had the tool actually been invoked. Resolves via paths. - * If you check and the tool does not exist, it will throw. 
+ * Constructs a globber * - * @param tool name of the tool - * @param check whether to check if tool exists - * @returns Promise path to tool + * @param patterns Patterns separated by newlines + * @param options Glob options */ -function which(tool, check) { +function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // recursive when check=true - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); - } - else { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); - } - } - return result; - } - const matches = yield findInPath(tool); - if (matches && matches.length > 0) { - return matches[0]; - } - return ''; + return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } -exports.which = which; +exports.create = create; /** - * Returns a list of all occurrences of the given tool on the system path. + * Computes the sha256 hash of a glob * - * @returns Promise the paths of the tool + * @param patterns Patterns separated by newlines + * @param currentWorkspace Workspace used when matching files + * @param options Glob options + * @param verbose Enables verbose logging */ -function findInPath(tool) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { - for (const extension of process.env['PATHEXT'].split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return [filePath]; - } - return []; - } - // if any path separators, return empty - if (tool.includes(path.sep)) { - return []; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // find all matches - const matches = []; - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); - if (filePath) { - matches.push(filePath); - } - } - return matches; - }); -} -exports.findInPath = findInPath; -function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - const copySourceDirectory = options.copySourceDirectory == null - ? 
true - : Boolean(options.copySourceDirectory); - return { force, recursive, copySourceDirectory }; -} -function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter(this, void 0, void 0, function* () { - // Ensure there is not a run away recursive copy - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - // Recurse - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } - else { - yield copyFile(srcFile, destFile, force); - } - } - // Change the mode for the newly created directory - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); - }); -} -// Buffered file copy -function copyFile(srcFile, destFile, force) { +function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { return __awaiter(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - // unlink/re-link it - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } - catch (e) { - // Try to override file permission - if (e.code === 'EPERM') { - yield ioUtil.chmod(destFile, '0666'); - yield ioUtil.unlink(destFile); - } - // other errors = it doesn't exist, no work to do - } - // Copy over symlink - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); - } - else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === 'boolean') { + followSymbolicLinks = options.followSymbolicLinks; } + const globber = yield create(patterns, { followSymbolicLinks }); + return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose); }); } -//# sourceMappingURL=io.js.map +exports.hashFiles = hashFiles; +//# sourceMappingURL=glob.js.map /***/ }), -/***/ 32473: -/***/ (function(module, exports, __nccwpck_require__) { +/***/ 51026: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -10234,118 +11817,45 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0; -const semver = __importStar(__nccwpck_require__(70562)); -const core_1 = __nccwpck_require__(42186); -// needs to be require for core node modules to be mocked -/* eslint @typescript-eslint/no-require-imports: 0 */ -const os = __nccwpck_require__(22037); -const cp = __nccwpck_require__(32081); -const fs = __nccwpck_require__(57147); -function _findMatch(versionSpec, stable, candidates, archFilter) { - return __awaiter(this, void 0, void 0, function* () { - const platFilter = os.platform(); - let result; - let match; - let file; - for (const candidate of candidates) { - const version = candidate.version; - core_1.debug(`check ${version} satisfies ${versionSpec}`); - if (semver.satisfies(version, versionSpec) && - (!stable || candidate.stable === stable)) { - file = candidate.files.find(item => { - core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); - let chk = item.arch === archFilter && item.platform === platFilter; - if (chk && item.platform_version) { - const osVersion = module.exports._getOsVersion(); - if (osVersion === item.platform_version) { - chk = true; - } - else { - chk = semver.satisfies(osVersion, item.platform_version); - } - } - return chk; - }); - if (file) { - core_1.debug(`matched ${candidate.version}`); - match = candidate; - break; - } - } +exports.getOptions = void 0; +const core = __importStar(__nccwpck_require__(42186)); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } - if (match && file) { - // clone since we're mutating the file list to be only the file that matches - result = Object.assign({}, match); - result.files = [file]; + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); } - return result; - }); -} -exports._findMatch = _findMatch; -function _getOsVersion() { - // TODO: add windows and other linux, arm variants - // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) - const plat = os.platform(); - let version = ''; - if (plat === 'darwin') { - version = cp.execSync('sw_vers -productVersion').toString(); - } - else if (plat === 'linux') { - // lsb_release process not in some containers, readfile - // Run cat /etc/lsb-release - // DISTRIB_ID=Ubuntu - // DISTRIB_RELEASE=18.04 - // DISTRIB_CODENAME=bionic - // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" - const lsbContents = module.exports._readLinuxVersionFile(); - if (lsbContents) { - const lines = lsbContents.split('\n'); - for (const line of lines) { - const parts = line.split('='); - if (parts.length === 2 && - (parts[0].trim() === 'VERSION_ID' || - parts[0].trim() === 'DISTRIB_RELEASE')) { - version = parts[1] - .trim() - .replace(/^"/, '') - .replace(/"$/, ''); - break; - } - } + if (typeof copy.matchDirectories === 'boolean') { + 
result.matchDirectories = copy.matchDirectories; + core.debug(`matchDirectories '${result.matchDirectories}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } - return version; -} -exports._getOsVersion = _getOsVersion; -function _readLinuxVersionFile() { - const lsbReleaseFile = '/etc/lsb-release'; - const osReleaseFile = '/etc/os-release'; - let contents = ''; - if (fs.existsSync(lsbReleaseFile)) { - contents = fs.readFileSync(lsbReleaseFile).toString(); - } - else if (fs.existsSync(osReleaseFile)) { - contents = fs.readFileSync(osReleaseFile).toString(); - } - return contents; + return result; } -exports._readLinuxVersionFile = _readLinuxVersionFile; -//# sourceMappingURL=manifest.js.map +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map /***/ }), -/***/ 38279: +/***/ 28298: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -10378,64 +11888,216 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RetryHelper = void 0; +exports.DefaultGlobber = void 0; const core = __importStar(__nccwpck_require__(42186)); -/** - * Internal class for retries - */ -class RetryHelper { - constructor(maxAttempts, minSeconds, maxSeconds) { - if (maxAttempts < 1) { - throw new Error('max attempts should be greater than or equal to 1'); - } - this.maxAttempts = maxAttempts; - this.minSeconds = Math.floor(minSeconds); - this.maxSeconds = Math.floor(maxSeconds); - if (this.minSeconds > this.maxSeconds) { - throw new Error('min seconds should be less than or equal to max seconds'); - } +const fs = __importStar(__nccwpck_require__(57147)); +const globOptionsHelper = __importStar(__nccwpck_require__(51026)); +const path = __importStar(__nccwpck_require__(71017)); +const patternHelper = __importStar(__nccwpck_require__(29005)); +const internal_match_kind_1 = __nccwpck_require__(81063); +const internal_pattern_1 = __nccwpck_require__(64536); +const internal_search_state_1 = __nccwpck_require__(89117); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); } - execute(action, isRetryable) { + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; return __awaiter(this, void 0, void 0, function* () { - let attempt = 1; - while (attempt < this.maxAttempts) { - // Try + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { try { - return yield action(); + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); } catch (err) { - if (isRetryable && !isRetryable(err)) { - throw err; + if (err.code === 'ENOENT') { + continue; } - core.info(err.message); + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? 
+ const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); } - // Sleep - const seconds = this.getSleepAmount(); - core.info(`Waiting ${seconds} seconds before trying again`); - yield this.sleep(seconds); - attempt++; } - // Last attempt - return yield action(); }); } - getSleepAmount() { - return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + - this.minSeconds); + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); } - sleep(seconds) { + static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => setTimeout(resolve, seconds * 1000)); + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; }); } } -exports.RetryHelper = RetryHelper; -//# sourceMappingURL=retry-helper.js.map +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map /***/ }), -/***/ 27784: +/***/ 2448: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -10468,6196 +12130,33751 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0; +exports.hashFiles = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); const core = __importStar(__nccwpck_require__(42186)); -const io = __importStar(__nccwpck_require__(47351)); const fs = __importStar(__nccwpck_require__(57147)); -const mm = __importStar(__nccwpck_require__(32473)); -const os = __importStar(__nccwpck_require__(22037)); -const path = __importStar(__nccwpck_require__(71017)); -const httpm = __importStar(__nccwpck_require__(96255)); -const semver = __importStar(__nccwpck_require__(70562)); const stream = __importStar(__nccwpck_require__(12781)); const util = __importStar(__nccwpck_require__(73837)); -const assert_1 = __nccwpck_require__(39491); -const v4_1 = __importDefault(__nccwpck_require__(17468)); -const exec_1 = __nccwpck_require__(71514); -const retry_helper_1 = __nccwpck_require__(38279); -class HTTPError extends Error { - constructor(httpStatusCode) { - super(`Unexpected HTTP response: ${httpStatusCode}`); - this.httpStatusCode = httpStatusCode; - Object.setPrototypeOf(this, new.target.prototype); - 
} -} -exports.HTTPError = HTTPError; -const IS_WINDOWS = process.platform === 'win32'; -const IS_MAC = process.platform === 'darwin'; -const userAgent = 'actions/tool-cache'; -/** - * Download a tool from an url and stream it into a file - * - * @param url url of tool to download - * @param dest path to download tool - * @param auth authorization header - * @param headers other headers - * @returns path to downloaded tool - */ -function downloadTool(url, dest, auth, headers) { - return __awaiter(this, void 0, void 0, function* () { - dest = dest || path.join(_getTempDirectory(), v4_1.default()); - yield io.mkdirP(path.dirname(dest)); - core.debug(`Downloading ${url}`); - core.debug(`Destination ${dest}`); - const maxAttempts = 3; - const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); - const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); - const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); - return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { - return yield downloadToolAttempt(url, dest || '', auth, headers); - }), (err) => { - if (err instanceof HTTPError && err.httpStatusCode) { - // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests - if (err.httpStatusCode < 500 && - err.httpStatusCode !== 408 && - err.httpStatusCode !== 429) { - return false; - } - } - // Otherwise retry - return true; - }); - }); -} -exports.downloadTool = downloadTool; -function downloadToolAttempt(url, dest, auth, headers) { +const path = __importStar(__nccwpck_require__(71017)); +function hashFiles(globber, currentWorkspace, verbose = false) { + var e_1, _a; + var _b; return __awaiter(this, void 0, void 0, function* () { - if (fs.existsSync(dest)) { - throw new Error(`Destination file path ${dest} already exists`); - } - // Get the response headers - const http = new httpm.HttpClient(userAgent, [], { - allowRetries: false - }); - if (auth) { - core.debug('set auth'); - if (headers === undefined) { - headers = {}; - } - headers.authorization = auth; - } - const response = yield http.get(url, headers); - if (response.message.statusCode !== 200) { - const err = new HTTPError(response.message.statusCode); - core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); - throw err; - } - // Download the response body - const pipeline = util.promisify(stream.pipeline); - const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); - const readStream = responseMessageFactory(); - let succeeded = false; + const writeDelegate = verbose ? core.info : core.debug; + let hasMatch = false; + const githubWorkspace = currentWorkspace + ? currentWorkspace + : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd(); + const result = crypto.createHash('sha256'); + let count = 0; try { - yield pipeline(readStream, fs.createWriteStream(dest)); - core.debug('download complete'); - succeeded = true; - return dest; - } - finally { - // Error, delete dest before retry - if (!succeeded) { - core.debug('download failed'); - try { - yield io.rmRF(dest); + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + writeDelegate(file); + if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { + writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; } - catch (err) { - core.debug(`Failed to delete '${dest}'. ${err.message}`); + if (fs.statSync(file).isDirectory()) { + writeDelegate(`Skip directory '${file}'.`); + continue; + } + const hash = crypto.createHash('sha256'); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs.createReadStream(file), hash); + result.write(hash.digest()); + count++; + if (!hasMatch) { + hasMatch = true; } } } - }); -} -/** - * Extract a .7z file - * - * @param file path to the .7z file - * @param dest destination directory. Optional. - * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this - * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will - * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is - * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line - * interface, it is smaller than the full command line interface, and it does support long paths. At the - * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. - * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path - * to 7zr.exe can be pass to this function. - * @returns path to the destination directory - */ -function extract7z(file, dest, _7zPath) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - const originalCwd = process.cwd(); - process.chdir(dest); - if (_7zPath) { - try { - const logLevel = core.isDebug() ? 
'-bb1' : '-bb0'; - const args = [ - 'x', - logLevel, - '-bd', - '-sccUTF-8', - file - ]; - const options = { - silent: true - }; - yield exec_1.exec(`"${_7zPath}"`, args, options); - } - finally { - process.chdir(originalCwd); - } - } - else { - const escapedScript = path - .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') - .replace(/'/g, "''") - .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; - const args = [ - '-NoLogo', - '-Sta', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - command - ]; - const options = { - silent: true - }; + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { try { - const powershellPath = yield io.which('powershell', true); - yield exec_1.exec(`"${powershellPath}"`, args, options); - } - finally { - process.chdir(originalCwd); + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); } + finally { if (e_1) throw e_1.error; } } - return dest; - }); -} -exports.extract7z = extract7z; -/** - * Extract a compressed tar archive - * - * @param file path to the tar - * @param dest destination directory. Optional. - * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. - * @returns path to the destination directory - */ -function extractTar(file, dest, flags = 'xz') { - return __awaiter(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - // Create dest - dest = yield _createExtractFolder(dest); - // Determine whether GNU tar - core.debug('Checking tar --version'); - let versionOutput = ''; - yield exec_1.exec('tar --version', [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => (versionOutput += data.toString()), - stderr: (data) => (versionOutput += data.toString()) - } - }); - core.debug(versionOutput.trim()); - const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); - // Initialize args - let args; - if (flags instanceof Array) { - args = flags; + result.end(); + if (hasMatch) { + writeDelegate(`Found ${count} files to hash.`); + return result.digest('hex'); } else { - args = [flags]; - } - if (core.isDebug() && !flags.includes('v')) { - args.push('-v'); - } - let destArg = dest; - let fileArg = file; - if (IS_WINDOWS && isGnuTar) { - args.push('--force-local'); - destArg = dest.replace(/\\/g, '/'); - // Technically only the dest needs to have `/` but for aesthetic consistency - // convert slashes in the file arg too. 
- fileArg = file.replace(/\\/g, '/'); - } - if (isGnuTar) { - // Suppress warnings when using GNU tar to extract archives created by BSD tar - args.push('--warning=no-unknown-keyword'); - args.push('--overwrite'); + writeDelegate(`No matches found for glob`); + return ''; } - args.push('-C', destArg, '-f', fileArg); - yield exec_1.exec(`tar`, args); - return dest; }); } -exports.extractTar = extractTar; +exports.hashFiles = hashFiles; +//# sourceMappingURL=internal-hash-files.js.map + +/***/ }), + +/***/ 81063: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MatchKind = void 0; /** - * Extract a xar compatible archive - * - * @param file path to the archive - * @param dest destination directory. Optional. - * @param flags flags for the xar. Optional. - * @returns path to the destination directory + * Indicates whether a pattern matches a path */ -function extractXar(file, dest, flags = []) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - let args; - if (flags instanceof Array) { - args = flags; - } - else { - args = [flags]; - } - args.push('-x', '-C', dest, '-f', file); - if (core.isDebug()) { - args.push('-v'); - } - const xarPath = yield io.which('xar', true); - yield exec_1.exec(`"${xarPath}"`, _unique(args)); - return dest; - }); -} -exports.extractXar = extractXar; -/** - * Extract a zip - * - * @param file path to the zip - * @param dest destination directory. Optional. - * @returns path to the destination directory - */ -function extractZip(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - dest = yield _createExtractFolder(dest); - if (IS_WINDOWS) { - yield extractZipWin(file, dest); - } - else { - yield extractZipNix(file, dest); - } - return dest; - }); -} -exports.extractZip = extractZip; -function extractZipWin(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - // build the powershell command - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines - const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); - const pwshPath = yield io.which('pwsh', false); - //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory - //and the -Force flag for Expand-Archive as a fallback - if (pwshPath) { - //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive - const pwshCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, - `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, - `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` - ].join(' '); - const args = [ - '-NoLogo', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - pwshCommand - ]; - core.debug(`Using pwsh at path: ${pwshPath}`); - yield exec_1.exec(`"${pwshPath}"`, args); - } - 
else { - const powershellCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, - `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, - `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` - ].join(' '); - const args = [ - '-NoLogo', - '-Sta', - '-NoProfile', - '-NonInteractive', - '-ExecutionPolicy', - 'Unrestricted', - '-Command', - powershellCommand - ]; - const powershellPath = yield io.which('powershell', true); - core.debug(`Using powershell at path: ${powershellPath}`); - yield exec_1.exec(`"${powershellPath}"`, args); - } - }); -} -function extractZipNix(file, dest) { - return __awaiter(this, void 0, void 0, function* () { - const unzipPath = yield io.which('unzip', true); - const args = [file]; - if (!core.isDebug()) { - args.unshift('-q'); - } - args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run - yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); - }); -} -/** - * Caches a directory and installs it into the tool cacheDir - * - * @param sourceDir the directory to cache into tools - * @param tool tool name - * @param version version of the tool. semver format - * @param arch architecture of the tool. Optional. Defaults to machine architecture - */ -function cacheDir(sourceDir, tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - version = semver.clean(version) || version; - arch = arch || os.arch(); - core.debug(`Caching tool ${tool} ${version} ${arch}`); - core.debug(`source dir: ${sourceDir}`); - if (!fs.statSync(sourceDir).isDirectory()) { - throw new Error('sourceDir is not a directory'); - } - // Create the tool dir - const destPath = yield _createToolPath(tool, version, arch); - // copy each child item. do not move. move can fail on Windows - // due to anti-virus software having an open handle on a file. - for (const itemName of fs.readdirSync(sourceDir)) { - const s = path.join(sourceDir, itemName); - yield io.cp(s, destPath, { recursive: true }); - } - // write .complete - _completeToolPath(tool, version, arch); - return destPath; - }); -} -exports.cacheDir = cacheDir; +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), + +/***/ 1849: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(__nccwpck_require__(71017)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const IS_WINDOWS = process.platform === 'win32'; /** - * Caches a downloaded file (GUID) and installs it - * into the tool cache with a given targetName + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * - * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. - * @param targetFile the name of the file name in the tools directory - * @param tool tool name - * @param version version of the tool. semver format - * @param arch architecture of the tool. Optional. Defaults to machine architecture - */ -function cacheFile(sourceFile, targetFile, tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - version = semver.clean(version) || version; - arch = arch || os.arch(); - core.debug(`Caching tool ${tool} ${version} ${arch}`); - core.debug(`source file: ${sourceFile}`); - if (!fs.statSync(sourceFile).isFile()) { - throw new Error('sourceFile is not a file'); - } - // create the tool dir - const destFolder = yield _createToolPath(tool, version, arch); - // copy instead of move. move can fail on Windows due to - // anti-virus software having an open handle on a file. - const destPath = path.join(destFolder, targetFile); - core.debug(`destination file ${destPath}`); - yield io.cp(sourceFile, destPath); - // write .complete - _completeToolPath(tool, version, arch); - return destFolder; - }); -} -exports.cacheFile = cacheFile; -/** - * Finds the path to a tool version in the local installed tool cache + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` * - * @param toolName name of the tool - * @param versionSpec version of the tool - * @param arch optional arch. defaults to arch of computer + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` */ -function find(toolName, versionSpec, arch) { - if (!toolName) { - throw new Error('toolName parameter is required'); - } - if (!versionSpec) { - throw new Error('versionSpec parameter is required'); - } - arch = arch || os.arch(); - // attempt to resolve an explicit version - if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions(toolName, arch); - const match = evaluateVersions(localVersions, versionSpec); - versionSpec = match; +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. 
\\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; } - // check for the explicit version in the cache - let toolPath = ''; - if (versionSpec) { - versionSpec = semver.clean(versionSpec) || ''; - const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); - core.debug(`checking cache: ${cachePath}`); - if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { - core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); - toolPath = cachePath; - } - else { - core.debug('not found'); - } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); } - return toolPath; + return result; } -exports.find = find; +exports.dirname = dirname; /** - * Finds the paths to all versions of a tool that are installed in the local tool cache - * - * @param toolName name of the tool - * @param arch optional arch. defaults to arch of computer + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. */ -function findAllVersions(toolName, arch) { - const versions = []; - arch = arch || os.arch(); - const toolPath = path.join(_getCacheDirectory(), toolName); - if (fs.existsSync(toolPath)) { - const children = fs.readdirSync(toolPath); - for (const child of children) { - if (isExplicitVersion(child)) { - const fullPath = path.join(toolPath, child, arch || ''); - if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { - versions.push(child); - } - } - } +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; } - return versions; -} -exports.findAllVersions = findAllVersions; -function getManifestFromRepo(owner, repo, auth, branch = 'master') { - return __awaiter(this, void 0, void 0, function* () { - let releases = []; - const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; - const http = new httpm.HttpClient('tool-cache'); - const headers = {}; - if (auth) { - core.debug('set auth'); - headers.authorization = auth; - } - const response = yield http.getJson(treeUrl, headers); - if (!response.result) { - return releases; - } - let manifestUrl = ''; - for (const item of response.result.tree) { - if (item.path === 'versions-manifest.json') { - manifestUrl = item.url; - break; - } - } - headers['accept'] = 'application/vnd.github.VERSION.raw'; - let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); - if (versionsRaw) { - // shouldn't be needed but protects against invalid json saved with BOM - versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); - try { - releases = JSON.parse(versionsRaw); + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. 
C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } } - catch (_a) { - core.debug('Invalid json'); + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; } } - return releases; - }); -} -exports.getManifestFromRepo = getManifestFromRepo; -function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - // wrap the internal impl - const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); - return match; - }); -} -exports.findFromManifest = findFromManifest; -function _createExtractFolder(dest) { - return __awaiter(this, void 0, void 0, function* () { - if (!dest) { - // create a temp dir - dest = path.join(_getTempDirectory(), v4_1.default()); - } - yield io.mkdirP(dest); - return dest; - }); -} -function _createToolPath(tool, version, arch) { - return __awaiter(this, void 0, void 0, function* () { - const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); - core.debug(`destination ${folderPath}`); - const markerPath = `${folderPath}.complete`; - yield io.rmRF(folderPath); - yield io.rmRF(markerPath); - yield io.mkdirP(folderPath); - return folderPath; - }); -} -function _completeToolPath(tool, version, arch) { - const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); - const markerPath = `${folderPath}.complete`; - fs.writeFileSync(markerPath, ''); - core.debug('finished caching tool'); -} -/** - * Check if version string is explicit - * - * @param versionSpec version string to check - */ -function isExplicitVersion(versionSpec) { - const c = semver.clean(versionSpec) || ''; - core.debug(`isExplicit: ${c}`); - const valid = semver.valid(c) != null; - core.debug(`explicit? ${valid}`); - return valid; -} -exports.isExplicitVersion = isExplicitVersion; -/** - * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec` - * - * @param versions array of versions to evaluate - * @param versionSpec semantic version spec to satisfy - */ -function evaluateVersions(versions, versionSpec) { - let version = ''; - core.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver.gt(a, b)) { - return 1; - } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver.satisfies(potential, versionSpec); - if (satisfied) { - version = potential; - break; + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. 
Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; } } - if (version) { - core.debug(`matched: ${version}`); + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty } else { - core.debug('match not found'); + // Append separator + root += path.sep; } - return version; + return root + itemPath; } -exports.evaluateVersions = evaluateVersions; +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** - * Gets RUNNER_TOOL_CACHE + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). */ -function _getCacheDirectory() { - const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; - assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); - return cacheDirectory; +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); } +exports.hasAbsoluteRoot = hasAbsoluteRoot; /** - * Gets RUNNER_TEMP + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ -function _getTempDirectory() { - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - return tempDirectory; +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); } +exports.hasRoot = hasRoot; /** - * Gets a global variable + * Removes redundant slashes and converts `/` to `\` on Windows */ -function _getGlobal(key, defaultValue) { - /* eslint-disable @typescript-eslint/no-explicit-any */ - const value = global[key]; - /* eslint-enable @typescript-eslint/no-explicit-any */ - return value !== undefined ? value : defaultValue; +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); } +exports.normalizeSeparators = normalizeSeparators; /** - * Returns an array of unique values. - * @param values Values to make unique. + * Normalizes the path separators and trims the trailing separator (when safe). 
+ * For example, `/foo/ => /foo` but `/ => /` */ -function _unique(values) { - return Array.from(new Set(values)); +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); } -//# sourceMappingURL=tool-cache.js.map +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map /***/ }), -/***/ 70562: -/***/ ((module, exports) => { +/***/ 96836: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -exports = module.exports = SemVer +"use strict"; -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Path = void 0; +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } } +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +/***/ }), -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 +/***/ 29005: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 +"use strict"; -function tok (n) { - t[n] = R++ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(__nccwpck_require__(1849)); +const internal_match_kind_1 = __nccwpck_require__(81063); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; } - -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' - -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. 
-var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] - -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') - } - return value +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; } - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' - -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' - -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' 
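Illustration only, not part of the diff: the quantifier-bounding idea behind the `makeSafeRe` helper removed above. Unbounded greedy tokens such as `\d+` are rewritten to bounded ones so pathological inputs cannot trigger catastrophic backtracking. This is a minimal standalone sketch that handles only the `\d` token; the constant mirrors the `MAX_LENGTH` value above but is illustrative here.

// Sketch of the bounding transform (assumption: digits only, for clarity)
const MAX_LENGTH_SKETCH = 256;

function boundGreedyDigits(source) {
  // '\d*' -> '\d{0,256}', '\d+' -> '\d{1,256}'
  return source
    .split('\\d*').join(`\\d{0,${MAX_LENGTH_SKETCH}}`)
    .split('\\d+').join(`\\d{1,${MAX_LENGTH_SKETCH}}`);
}

console.log(boundGreedyDigits('0|[1-9]\\d*')); // 0|[1-9]\d{0,256}
console.log(new RegExp(boundGreedyDigits('^\\d+$')).test('42')); // true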
- -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' - -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' - -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' - -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' - -tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' - -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' - -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' - -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' - -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' - -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' - -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' - -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' - -// Star ranges basically just allow anything at all. -tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. - safeRe[i] = new RegExp(makeSafeRe(src[i])) - } +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +/***/ }), - if (version instanceof SemVer) { - return version - } +/***/ 64536: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (typeof version !== 'string') { - return null - } +"use strict"; - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Pattern = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const minimatch_1 = __nccwpck_require__(83973); +const internal_match_kind_1 = __nccwpck_require__(81063); +const internal_path_1 = __nccwpck_require__(96836); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const 
searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. + itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } - - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' 
+ .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? 
+ if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) +/***/ }), - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +/***/ 89117: +/***/ ((__unused_webpack_module, exports) => { - this.raw = version +"use strict"; - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } +/***/ }), - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } +/***/ 35526: +/***/ (function(__unused_webpack_module, exports) { - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } +"use strict"; - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } - -SemVer.prototype.toString = function () { - return this.version +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} +/***/ }), -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +/***/ 96255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } +"use strict"; - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - } while (++i) -} - -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
- case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +const pm = __importStar(__nccwpck_require__(19835)); +const tunnel = __importStar(__nccwpck_require__(74294)); +const undici_1 = __nccwpck_require__(41773); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; } - -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } } - -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? 
-1 - : 1 -} - -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} - -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} - -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} - -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} - -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} - -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} - -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} - -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} - -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) - }) -} - -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) -} - -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} - -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} - -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} - -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} - -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} - -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 } - -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; } - -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if 
(requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } - } - - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} - -Comparator.prototype.toString = function () { - return this.value -} - -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); } - } - - return cmp(version, this.operator, this.semver, this.options) -} - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); } - } - - var rangeTmp - - if (this.operator === '') { - if (this.value === '') { - return true + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - 
- var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range - .trim() - .split(/\s+/) - .join(' ') - - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) - } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range -} - -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set -} - -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) -} - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() - - while (result && remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result -} - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
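Besides the plain-data helpers above (get, post, put, head and friends), the client also exposes a streaming variant, sendStream, defined just below, which forwards a readable stream as the request body. A hedged sketch of a file upload through it; the target URL and content-type header are placeholders:

import * as fs from 'fs';
import * as httpm from '@actions/http-client';

async function uploadFile(filePath: string, targetUrl: string): Promise<number> {
  const client = new httpm.HttpClient('setup-node-sketch');
  const stream = fs.createReadStream(filePath);
  // Any verb is accepted; PUT is a common choice for uploads.
  const res = await client.sendStream('PUT', targetUrl, stream, {
    'content-type': 'application/octet-stream'
  });
  await res.readBody(); // drain the response so the socket is released
  return res.message.statusCode ?? 0;
}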
-function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - - debug('caret return', ret) - return ret - }) -} - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} - -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. 
star is always as loose as it gets! - return comp.trim().replace(safeRe[t.STAR], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. 
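The 401 branch above hands the response to whichever handlers were passed to the HttpClient constructor; a handler only needs prepareRequest, canHandleAuthentication and handleAuthentication (prepareRequest is invoked later from _prepareRequest). A rough sketch of a bearer-token handler wired in that way; the class name and the use of GITHUB_TOKEN are illustrative, not part of this bundle:

import * as httpm from '@actions/http-client';

class SketchBearerHandler {
  constructor(private token: string) {}

  // Called from _prepareRequest for every outgoing request
  // (options is http.RequestOptions in the real handler interface).
  prepareRequest(options: any): void {
    options.headers = options.headers || {};
    options.headers['authorization'] = `Bearer ${this.token}`;
  }

  // Returning false lets the unauthorized response fall through to the caller.
  canHandleAuthentication(): boolean {
    return false;
  }

  async handleAuthentication(): Promise<never> {
    throw new Error('challenge handling is not implemented in this sketch');
  }
}

const authedClient = new httpm.HttpClient('setup-node-sketch', [
  new SketchBearerHandler(process.env.GITHUB_TOKEN || '')
]);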
+ return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } - } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map - // Version has a -pre, but it's not one of the ones we like. 
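The retry delay produced by _performExponentialBackoff above is plain capped exponential backoff; ExponentialBackoffCeiling and ExponentialBackoffTimeSlice are constants defined elsewhere in this bundle, so the numbers below are illustrative only. A self-contained sketch of the same computation:

// Illustrative values; the bundled client defines its own ceiling and time slice.
const BACKOFF_CEILING = 10;
const BACKOFF_TIME_SLICE_MS = 5;

function backoffDelayMs(retryNumber: number): number {
  const capped = Math.min(BACKOFF_CEILING, retryNumber);
  return BACKOFF_TIME_SLICE_MS * Math.pow(2, capped);
}

async function performExponentialBackoff(retryNumber: number): Promise<void> {
  await new Promise<void>(resolve => setTimeout(resolve, backoffDelayMs(retryNumber)));
}

// With these values backoffDelayMs(1) === 10 and backoffDelayMs(3) === 40,
// and the delay stops growing once retryNumber reaches the ceiling.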
- return false - } +/***/ }), - return true -} +/***/ 19835: +/***/ ((__unused_webpack_module, exports) => { -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} +"use strict"; -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. 
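The proxy resolution in getProxyUrl here only consults the conventional proxy environment variables, choosing the https_* pair for HTTPS targets, and tolerates values that lack a scheme; the checkBypass logic that follows then decides whether no_proxy exempts the host entirely. A standalone sketch of that selection step, kept separate from the vendored module and using a made-up proxy host:

// Sketch of the environment-variable selection performed by getProxyUrl above.
function resolveProxyFromEnv(requestUrl: string): URL | undefined {
  const target = new URL(requestUrl);
  const raw =
    target.protocol === 'https:'
      ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
      : process.env['http_proxy'] || process.env['HTTP_PROXY'];
  if (!raw) {
    return undefined;
  }
  try {
    return new URL(raw);
  } catch {
    // A value like "proxy.example.org" (no scheme, no port) fails to parse,
    // so fall back to assuming http, as the code above does.
    if (!raw.startsWith('http://') && !raw.startsWith('https://')) {
      return new URL(`http://${raw}`);
    }
    return undefined;
  }
}

// e.g. with HTTPS_PROXY=http://proxy.example.org:8080 set,
// resolveProxyFromEnv('https://nodejs.org')?.host === 'proxy.example.org:8080'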
-exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) } - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; } - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } +//# sourceMappingURL=proxy.js.map -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } +/***/ }), - if (typeof version !== 'string') { - return null - } +/***/ 81962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - options = options || {} +"use strict"; - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(57147)); +const path = __importStar(__nccwpck_require__(71017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. 
C: or C:\hello + } + return p.startsWith('/'); } - - -/***/ }), - -/***/ 7701: -/***/ ((module) => { - +exports.isRooted = isRooted; /** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); } - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); } - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 37269: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. 
In node.js -// this is pretty straight-forward - we use the crypto API. - -var crypto = __nccwpck_require__(6113); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map /***/ }), -/***/ 17468: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 47351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -var rng = __nccwpck_require__(37269); -var bytesToUuid = __nccwpck_require__(7701); +"use strict"; -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - - return buf || bytesToUuid(rnds); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(39491); +const path = __importStar(__nccwpck_require__(71017)); +const ioUtil = __importStar(__nccwpck_require__(81962)); +/** + * Copies a file or folder. 
+ * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); } - -module.exports = v4; - - -/***/ }), - -/***/ 2856: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const WritableStream = (__nccwpck_require__(84492).Writable) -const inherits = (__nccwpck_require__(47261).inherits) - -const StreamSearch = __nccwpck_require__(88534) - -const PartStream = __nccwpck_require__(38710) -const HeaderParser = __nccwpck_require__(90333) - -const DASH = 45 -const B_ONEDASH = Buffer.from('-') -const B_CRLF = Buffer.from('\r\n') -const EMPTY_FN = function () {} - -function Dicer (cfg) { - if (!(this instanceof Dicer)) { return new Dicer(cfg) } - WritableStream.call(this, cfg) - - if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } - - if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } - - this._headerFirst = cfg.headerFirst - - this._dashes = 0 - this._parts = 0 - this._finished = false - this._realFinish = false - this._isPreamble = true - this._justMatched = false - this._firstWrite = true - this._inHeader = true - this._part = undefined - this._cb = undefined - this._ignoreData = false - this._partOpts = { highWaterMark: cfg.partHwm } - this._pause = false - - const self = this - this._hparser = new HeaderParser(cfg) - this._hparser.on('header', function (header) { - self._inHeader = false - self._part.emit('header', header) - }) +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); } -inherits(Dicer, WritableStream) - -Dicer.prototype.emit = function (ev) { - if (ev === 'finish' && !this._realFinish) { - if (!this._finished) { - const self = this - process.nextTick(function () { - self.emit('error', new Error('Unexpected end of multipart data')) - if (self._part && !self._ignoreData) { - const type = (self._isPreamble ? 'Preamble' : 'Part') - self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) - self._part.push(null) - process.nextTick(function () { - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - return +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } } - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - } - } else { WritableStream.prototype.emit.apply(this, arguments) } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); } - -Dicer.prototype._write = function (data, encoding, cb) { - // ignore unexpected data (e.g. extra trailer data after finished) - if (!this._hparser && !this._bparser) { return cb() } - - if (this._headerFirst && this._isPreamble) { - if (!this._part) { - this._part = new PartStream(this._partOpts) - if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } - } - const r = this._hparser.push(data) - if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } - } - - // allows for "easier" testing - if (this._firstWrite) { - this._bparser.push(B_CRLF) - this._firstWrite = false - } - - this._bparser.push(data) - - if (this._pause) { this._cb = cb } else { cb() } +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); } - -Dicer.prototype.reset = function () { - this._part = undefined - this._bparser = undefined - this._hparser = undefined +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. 
+ * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); } - -Dicer.prototype.setBoundary = function (boundary) { - const self = this - this._bparser = new StreamSearch('\r\n--' + boundary) - this._bparser.on('info', function (isMatch, data, start, end) { - self._oninfo(isMatch, data, start, end) - }) +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); } - -Dicer.prototype._ignore = function () { - if (this._part && !this._ignoreData) { - this._ignoreData = true - this._part.on('error', EMPTY_FN) - // we must perform some kind of read on the stream even though we are - // ignoring the data, otherwise node's Readable stream will not emit 'end' - // after pushing null to the stream - this._part.resume() - } +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? 
true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; } - -Dicer.prototype._oninfo = function (isMatch, data, start, end) { - let buf; const self = this; let i = 0; let r; let shouldWriteMore = true - - if (!this._part && this._justMatched && data) { - while (this._dashes < 2 && (start + i) < end) { - if (data[start + i] === DASH) { - ++i - ++this._dashes - } else { - if (this._dashes) { buf = B_ONEDASH } - this._dashes = 0 - break - } - } - if (this._dashes === 2) { - if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } - this.reset() - this._finished = true - // no more parts will be added - if (self._parts === 0) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } - } - if (this._dashes) { return } - } - if (this._justMatched) { this._justMatched = false } - if (!this._part) { - this._part = new PartStream(this._partOpts) - this._part._read = function (n) { - self._unpause() - } - if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } - if (!this._isPreamble) { this._inHeader = true } - } - if (data && start < end && !this._ignoreData) { - if (this._isPreamble || !this._inHeader) { - if (buf) { shouldWriteMore = this._part.push(buf) } - shouldWriteMore = this._part.push(data.slice(start, end)) - if (!shouldWriteMore) { this._pause = true } - } else if (!this._isPreamble && this._inHeader) { - if (buf) { this._hparser.push(buf) } - r = this._hparser.push(data.slice(start, end)) - if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } - } - } - if (isMatch) { - this._hparser.reset() - if (this._isPreamble) { this._isPreamble = false } else { - if (start !== end) { - ++this._parts - this._part.on('end', function () { - if (--self._parts === 0) { - if (self._finished) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } else { - self._unpause() +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); } - } - }) - } - } - this._part.push(null) - this._part = undefined - this._ignoreData = false - this._justMatched = true - this._dashes = 0 - } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield 
ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); } +//# sourceMappingURL=io.js.map -Dicer.prototype._unpause = function () { - if (!this._pause) { return } +/***/ }), - this._pause = false - if (this._cb) { - const cb = this._cb - this._cb = undefined - cb() - } -} - -module.exports = Dicer - - -/***/ }), - -/***/ 90333: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 32473: +/***/ (function(module, exports, __nccwpck_require__) { "use strict"; - -const EventEmitter = (__nccwpck_require__(15673).EventEmitter) -const inherits = (__nccwpck_require__(47261).inherits) -const getLimit = __nccwpck_require__(49692) - -const StreamSearch = __nccwpck_require__(88534) - -const B_DCRLF = Buffer.from('\r\n\r\n') -const RE_CRLF = /\r\n/g -const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex - -function HeaderParser (cfg) { - EventEmitter.call(this) - - cfg = cfg || {} - const self = this - this.nread = 0 - this.maxed = false - this.npairs = 0 - this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) - this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) - this.buffer = '' - this.header = {} - this.finished = false - this.ss = new StreamSearch(B_DCRLF) - this.ss.on('info', function (isMatch, data, start, end) { - if (data && !self.maxed) { - if (self.nread + end - start >= self.maxHeaderSize) { - end = self.maxHeaderSize - self.nread + start - self.nread = self.maxHeaderSize - self.maxed = true - } else { self.nread += (end - start) } - - self.buffer += data.toString('binary', start, end) - } - if (isMatch) { self._finish() } - }) -} -inherits(HeaderParser, EventEmitter) - -HeaderParser.prototype.push = function (data) { - const r = this.ss.push(data) - if (this.finished) { return r } -} - -HeaderParser.prototype.reset = function () { - this.finished = false - this.buffer = '' - this.header = {} - this.ss.reset() +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports._readLinuxVersionFile = exports._getOsVersion = exports._findMatch = void 0; +const semver = __importStar(__nccwpck_require__(70562)); +const core_1 = __nccwpck_require__(42186); +// needs to be require for core node modules to be mocked +/* eslint @typescript-eslint/no-require-imports: 0 */ +const os = __nccwpck_require__(22037); +const cp = __nccwpck_require__(32081); +const fs = __nccwpck_require__(57147); +function _findMatch(versionSpec, stable, candidates, archFilter) { + return __awaiter(this, void 0, void 0, function* () { + const platFilter = os.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version = candidate.version; + core_1.debug(`check ${version} satisfies ${versionSpec}`); + if (semver.satisfies(version, versionSpec) && + (!stable || candidate.stable === stable)) { + file = candidate.files.find(item => { + core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } + else { + chk = semver.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + core_1.debug(`matched ${candidate.version}`); + match = candidate; + break; + } + } + } + if (match && file) { + // clone since we're mutating the file list to be only the file that matches + result = Object.assign({}, match); + result.files = [file]; + } + return result; + }); } - -HeaderParser.prototype._finish = function () { - if (this.buffer) { this._parseHeader() } - this.ss.matches = this.ss.maxMatches - const header = this.header - this.header = {} - this.buffer = '' - this.finished = true - this.nread = this.npairs = 0 - this.maxed = false - this.emit('header', header) +exports._findMatch = _findMatch; +function _getOsVersion() { + // TODO: add windows and other linux, arm variants + // right now filtering on version is only an ubuntu and macos scenario for tools we build for hosted (python) + const plat = os.platform(); + let version = ''; + if (plat === 'darwin') { + version = cp.execSync('sw_vers -productVersion').toString(); + } + else if (plat === 'linux') { + // lsb_release process not in some containers, readfile + // Run cat /etc/lsb-release + // DISTRIB_ID=Ubuntu + // DISTRIB_RELEASE=18.04 + // DISTRIB_CODENAME=bionic + // DISTRIB_DESCRIPTION="Ubuntu 18.04.4 LTS" + const lsbContents = module.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split('\n'); + for (const line of lines) { + const parts = line.split('='); + if (parts.length === 2 && + (parts[0].trim() === 'VERSION_ID' || + parts[0].trim() === 'DISTRIB_RELEASE')) { + version = parts[1] + .trim() + .replace(/^"/, '') + .replace(/"$/, ''); + break; + } + } + } + } + return version; } - -HeaderParser.prototype._parseHeader = function 
() { - if (this.npairs === this.maxHeaderPairs) { return } - - const lines = this.buffer.split(RE_CRLF) - const len = lines.length - let m, h - - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (lines[i].length === 0) { continue } - if (lines[i][0] === '\t' || lines[i][0] === ' ') { - // folded header content - // RFC2822 says to just remove the CRLF and not the whitespace following - // it, so we follow the RFC and include the leading whitespace ... - if (h) { - this.header[h][this.header[h].length - 1] += lines[i] - continue - } +exports._getOsVersion = _getOsVersion; +function _readLinuxVersionFile() { + const lsbReleaseFile = '/etc/lsb-release'; + const osReleaseFile = '/etc/os-release'; + let contents = ''; + if (fs.existsSync(lsbReleaseFile)) { + contents = fs.readFileSync(lsbReleaseFile).toString(); } - - const posColon = lines[i].indexOf(':') - if ( - posColon === -1 || - posColon === 0 - ) { - return + else if (fs.existsSync(osReleaseFile)) { + contents = fs.readFileSync(osReleaseFile).toString(); } - m = RE_HDR.exec(lines[i]) - h = m[1].toLowerCase() - this.header[h] = this.header[h] || [] - this.header[h].push((m[2] || '')) - if (++this.npairs === this.maxHeaderPairs) { break } - } + return contents; } - -module.exports = HeaderParser - +exports._readLinuxVersionFile = _readLinuxVersionFile; +//# sourceMappingURL=manifest.js.map /***/ }), -/***/ 38710: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 38279: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -const inherits = (__nccwpck_require__(47261).inherits) -const ReadableStream = (__nccwpck_require__(84492).Readable) - -function PartStream (opts) { - ReadableStream.call(this, opts) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RetryHelper = void 0; +const core = __importStar(__nccwpck_require__(42186)); +/** + * Internal class for retries + */ +class RetryHelper { + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error('max attempts should be greater than or equal to 1'); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error('min seconds should be less than or equal to max seconds'); + } + } + execute(action, isRetryable) { + return __awaiter(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + // Try + try { + return yield action(); + } + catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; + } + core.info(err.message); + } + // Sleep + const seconds = this.getSleepAmount(); + core.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; + } + // Last attempt + return yield action(); + }); + } + getSleepAmount() { + return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + + this.minSeconds); + } + sleep(seconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, seconds * 1000)); + }); + } } -inherits(PartStream, ReadableStream) - -PartStream.prototype._read = function (n) {} - -module.exports = PartStream - +exports.RetryHelper = RetryHelper; +//# sourceMappingURL=retry-helper.js.map /***/ }), -/***/ 88534: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 27784: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.evaluateVersions = exports.isExplicitVersion = exports.findFromManifest = exports.getManifestFromRepo = exports.findAllVersions = exports.find = exports.cacheFile = exports.cacheDir = exports.extractZip = exports.extractXar = exports.extractTar = exports.extract7z = exports.downloadTool = exports.HTTPError = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const io = __importStar(__nccwpck_require__(47351)); +const fs = __importStar(__nccwpck_require__(57147)); +const mm = __importStar(__nccwpck_require__(32473)); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const httpm = __importStar(__nccwpck_require__(96255)); +const semver = __importStar(__nccwpck_require__(70562)); +const stream = __importStar(__nccwpck_require__(12781)); +const util = __importStar(__nccwpck_require__(73837)); +const assert_1 = __nccwpck_require__(39491); +const v4_1 = __importDefault(__nccwpck_require__(17468)); +const exec_1 = __nccwpck_require__(71514); +const retry_helper_1 = __nccwpck_require__(38279); +class HTTPError extends Error { + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.HTTPError = HTTPError; +const IS_WINDOWS = process.platform === 'win32'; +const IS_MAC = process.platform === 'darwin'; +const userAgent = 'actions/tool-cache'; /** - * Copyright Brian White. All rights reserved. - * - * @see https://github.com/mscdex/streamsearch - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. 
+ * Download a tool from an url and stream it into a file * - * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation - * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + * @param url url of tool to download + * @param dest path to download tool + * @param auth authorization header + * @param headers other headers + * @returns path to downloaded tool */ -const EventEmitter = (__nccwpck_require__(15673).EventEmitter) -const inherits = (__nccwpck_require__(47261).inherits) - -function SBMH (needle) { - if (typeof needle === 'string') { - needle = Buffer.from(needle) - } - - if (!Buffer.isBuffer(needle)) { - throw new TypeError('The needle has to be a String or a Buffer.') - } - - const needleLength = needle.length - - if (needleLength === 0) { - throw new Error('The needle cannot be an empty String/Buffer.') - } - - if (needleLength > 256) { - throw new Error('The needle cannot have a length bigger than 256.') - } - - this.maxMatches = Infinity - this.matches = 0 - - this._occ = new Array(256) - .fill(needleLength) // Initialize occurrence table. - this._lookbehind_size = 0 - this._needle = needle - this._bufpos = 0 - - this._lookbehind = Buffer.alloc(needleLength) - - // Populate occurrence table with analysis of the needle, - // ignoring last letter. - for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var - this._occ[needle[i]] = needleLength - 1 - i - } -} -inherits(SBMH, EventEmitter) - -SBMH.prototype.reset = function () { - this._lookbehind_size = 0 - this.matches = 0 - this._bufpos = 0 -} - -SBMH.prototype.push = function (chunk, pos) { - if (!Buffer.isBuffer(chunk)) { - chunk = Buffer.from(chunk, 'binary') - } - const chlen = chunk.length - this._bufpos = pos || 0 - let r - while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } - return r +function downloadTool(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + dest = dest || path.join(_getTempDirectory(), v4_1.default()); + yield io.mkdirP(path.dirname(dest)); + core.debug(`Downloading ${url}`); + core.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10); + const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || '', auth, headers); + }), (err) => { + if (err instanceof HTTPError && err.httpStatusCode) { + // Don't retry anything less than 500, except 408 Request Timeout and 429 Too Many Requests + if (err.httpStatusCode < 500 && + err.httpStatusCode !== 408 && + err.httpStatusCode !== 429) { + return false; + } + } + // Otherwise retry + return true; + }); + }); } - -SBMH.prototype._sbmh_feed = function (data) { - const len = data.length - const needle = this._needle - const needleLength = needle.length - const lastNeedleChar = needle[needleLength - 1] - - // Positive: points to a position in `data` - // pos == 3 points to data[3] - // Negative: points to a position in the lookbehind buffer - // pos == -2 points to lookbehind[lookbehind_size - 2] - let pos = -this._lookbehind_size - let ch - - if (pos < 0) { - // Lookbehind buffer is not empty. 
Perform Boyer-Moore-Horspool - // search with character lookup code that considers both the - // lookbehind buffer and the current round's haystack data. - // - // Loop until - // there is a match. - // or until - // we've moved past the position that requires the - // lookbehind buffer. In this case we switch to the - // optimized loop. - // or until - // the character to look at lies outside the haystack. - while (pos < 0 && pos <= len - needleLength) { - ch = this._sbmh_lookup_char(data, pos + needleLength - 1) - - if ( - ch === lastNeedleChar && - this._sbmh_memcmp(data, pos, needleLength - 1) - ) { - this._lookbehind_size = 0 - ++this.matches - this.emit('info', true) - - return (this._bufpos = pos + needleLength) - } - pos += this._occ[ch] - } - - // No match. - - if (pos < 0) { - // There's too few data for Boyer-Moore-Horspool to run, - // so let's use a different algorithm to skip as much as - // we can. - // Forward pos until - // the trailing part of lookbehind + data - // looks like the beginning of the needle - // or until - // pos == 0 - while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } - } - - if (pos >= 0) { - // Discard lookbehind buffer. - this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) - this._lookbehind_size = 0 - } else { - // Cut off part of the lookbehind buffer that has - // been processed and append the entire haystack - // into it. - const bytesToCutOff = this._lookbehind_size + pos - if (bytesToCutOff > 0) { - // The cut off data is guaranteed not to contain the needle. - this.emit('info', false, this._lookbehind, 0, bytesToCutOff) - } - - this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, - this._lookbehind_size - bytesToCutOff) - this._lookbehind_size -= bytesToCutOff - - data.copy(this._lookbehind, this._lookbehind_size) - this._lookbehind_size += len - - this._bufpos = len - return len - } - } - - pos += (pos >= 0) * this._bufpos - - // Lookbehind buffer is now empty. We only need to check if the - // needle is in the haystack. - if (data.indexOf(needle, pos) !== -1) { - pos = data.indexOf(needle, pos) - ++this.matches - if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } - - return (this._bufpos = pos + needleLength) - } else { - pos = len - needleLength - } - - // There was no match. If there's trailing haystack data that we cannot - // match yet using the Boyer-Moore-Horspool algorithm (because the trailing - // data is less than the needle size) then match using a modified - // algorithm that starts matching from the beginning instead of the end. - // Whatever trailing data is left after running this algorithm is added to - // the lookbehind buffer. - while ( - pos < len && - ( - data[pos] !== needle[0] || - ( - (Buffer.compare( - data.subarray(pos, pos + len - pos), - needle.subarray(0, len - pos) - ) !== 0) - ) - ) - ) { - ++pos - } - if (pos < len) { - data.copy(this._lookbehind, 0, pos, pos + (len - pos)) - this._lookbehind_size = len - pos - } - - // Everything until pos is guaranteed not to contain needle data. - if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? 
pos : len) } - - this._bufpos = len - return len +exports.downloadTool = downloadTool; +function downloadToolAttempt(url, dest, auth, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(dest)) { + throw new Error(`Destination file path ${dest} already exists`); + } + // Get the response headers + const http = new httpm.HttpClient(userAgent, [], { + allowRetries: false + }); + if (auth) { + core.debug('set auth'); + if (headers === undefined) { + headers = {}; + } + headers.authorization = auth; + } + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError(response.message.statusCode); + core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; + } + // Download the response body + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal('TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY', () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs.createWriteStream(dest)); + core.debug('download complete'); + succeeded = true; + return dest; + } + finally { + // Error, delete dest before retry + if (!succeeded) { + core.debug('download failed'); + try { + yield io.rmRF(dest); + } + catch (err) { + core.debug(`Failed to delete '${dest}'. ${err.message}`); + } + } + } + }); } - -SBMH.prototype._sbmh_lookup_char = function (data, pos) { - return (pos < 0) - ? this._lookbehind[this._lookbehind_size + pos] - : data[pos] +/** + * Extract a .7z file + * + * @param file path to the .7z file + * @param dest destination directory. Optional. + * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this + * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will + * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is + * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line + * interface, it is smaller than the full command line interface, and it does support long paths. At the + * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website. + * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path + * to 7zr.exe can be pass to this function. + * @returns path to the destination directory + */ +function extract7z(file, dest, _7zPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { + try { + const logLevel = core.isDebug() ? 
'-bb1' : '-bb0'; + const args = [ + 'x', + logLevel, + '-bd', + '-sccUTF-8', + file + ]; + const options = { + silent: true + }; + yield exec_1.exec(`"${_7zPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + else { + const escapedScript = path + .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1') + .replace(/'/g, "''") + .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io.which('powershell', true); + yield exec_1.exec(`"${powershellPath}"`, args, options); + } + finally { + process.chdir(originalCwd); + } + } + return dest; + }); } - -SBMH.prototype._sbmh_memcmp = function (data, pos, len) { - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } - } - return true +exports.extract7z = extract7z; +/** + * Extract a compressed tar archive + * + * @param file path to the tar + * @param dest destination directory. Optional. + * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional. + * @returns path to the destination directory + */ +function extractTar(file, dest, flags = 'xz') { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + // Create dest + dest = yield _createExtractFolder(dest); + // Determine whether GNU tar + core.debug('Checking tar --version'); + let versionOutput = ''; + yield exec_1.exec('tar --version', [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR'); + // Initialize args + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + if (core.isDebug() && !flags.includes('v')) { + args.push('-v'); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push('--force-local'); + destArg = dest.replace(/\\/g, '/'); + // Technically only the dest needs to have `/` but for aesthetic consistency + // convert slashes in the file arg too. 
+ fileArg = file.replace(/\\/g, '/'); + } + if (isGnuTar) { + // Suppress warnings when using GNU tar to extract archives created by BSD tar + args.push('--warning=no-unknown-keyword'); + args.push('--overwrite'); + } + args.push('-C', destArg, '-f', fileArg); + yield exec_1.exec(`tar`, args); + return dest; + }); } - -module.exports = SBMH - - -/***/ }), - -/***/ 33438: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const WritableStream = (__nccwpck_require__(84492).Writable) -const { inherits } = __nccwpck_require__(47261) -const Dicer = __nccwpck_require__(2856) - -const MultipartParser = __nccwpck_require__(90415) -const UrlencodedParser = __nccwpck_require__(16780) -const parseParams = __nccwpck_require__(34426) - -function Busboy (opts) { - if (!(this instanceof Busboy)) { return new Busboy(opts) } - - if (typeof opts !== 'object') { - throw new TypeError('Busboy expected an options-Object.') - } - if (typeof opts.headers !== 'object') { - throw new TypeError('Busboy expected an options-Object with headers-attribute.') - } - if (typeof opts.headers['content-type'] !== 'string') { - throw new TypeError('Missing Content-Type-header.') - } - - const { - headers, - ...streamOptions - } = opts - - this.opts = { - autoDestroy: false, - ...streamOptions - } - WritableStream.call(this, this.opts) - - this._done = false - this._parser = this.getParserByHeaders(headers) - this._finished = false +exports.extractTar = extractTar; +/** + * Extract a xar compatible archive + * + * @param file path to the archive + * @param dest destination directory. Optional. + * @param flags flags for the xar. Optional. + * @returns path to the destination directory + */ +function extractXar(file, dest, flags = []) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(IS_MAC, 'extractXar() not supported on current OS'); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; + } + else { + args = [flags]; + } + args.push('-x', '-C', dest, '-f', file); + if (core.isDebug()) { + args.push('-v'); + } + const xarPath = yield io.which('xar', true); + yield exec_1.exec(`"${xarPath}"`, _unique(args)); + return dest; + }); } -inherits(Busboy, WritableStream) - -Busboy.prototype.emit = function (ev) { - if (ev === 'finish') { - if (!this._done) { - this._parser?.end() - return - } else if (this._finished) { - return - } - this._finished = true - } - WritableStream.prototype.emit.apply(this, arguments) +exports.extractXar = extractXar; +/** + * Extract a zip + * + * @param file path to the zip + * @param dest destination directory. Optional. 
+ * @returns path to the destination directory + */ +function extractZip(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); + } + else { + yield extractZipNix(file, dest); + } + return dest; + }); } - -Busboy.prototype.getParserByHeaders = function (headers) { - const parsed = parseParams(headers['content-type']) - - const cfg = { - defCharset: this.opts.defCharset, - fileHwm: this.opts.fileHwm, - headers, - highWaterMark: this.opts.highWaterMark, - isPartAFile: this.opts.isPartAFile, - limits: this.opts.limits, - parsedConType: parsed, - preservePath: this.opts.preservePath - } - - if (MultipartParser.detect.test(parsed[0])) { - return new MultipartParser(this, cfg) - } - if (UrlencodedParser.detect.test(parsed[0])) { - return new UrlencodedParser(this, cfg) - } - throw new Error('Unsupported Content-Type.') +exports.extractZip = extractZip; +function extractZipWin(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + // build the powershell command + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ''); + const pwshPath = yield io.which('pwsh', false); + //To match the file overwrite behavior on nix systems, we use the overwrite = true flag for ExtractToDirectory + //and the -Force flag for Expand-Archive as a fallback + if (pwshPath) { + //attempt to use pwsh with ExtractToDirectory, if this fails attempt Expand-Archive + const pwshCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, + `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, + `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` + ].join(' '); + const args = [ + '-NoLogo', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + pwshCommand + ]; + core.debug(`Using pwsh at path: ${pwshPath}`); + yield exec_1.exec(`"${pwshPath}"`, args); + } + else { + const powershellCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, + `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, + `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` + ].join(' '); + const args = [ + '-NoLogo', + '-Sta', + '-NoProfile', + '-NonInteractive', + '-ExecutionPolicy', + 'Unrestricted', + '-Command', + powershellCommand + ]; + const powershellPath = yield io.which('powershell', true); + core.debug(`Using powershell at path: ${powershellPath}`); + yield exec_1.exec(`"${powershellPath}"`, args); + } + }); } - -Busboy.prototype._write = function (chunk, encoding, cb) { - this._parser.write(chunk, cb) +function extractZipNix(file, dest) { + return __awaiter(this, void 0, void 0, function* () { + const unzipPath = yield io.which('unzip', 
true); + const args = [file]; + if (!core.isDebug()) { + args.unshift('-q'); + } + args.unshift('-o'); //overwrite with -o, otherwise a prompt is shown which freezes the run + yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); + }); } +/** + * Caches a directory and installs it into the tool cacheDir + * + * @param sourceDir the directory to cache into tools + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheDir(sourceDir, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source dir: ${sourceDir}`); + if (!fs.statSync(sourceDir).isDirectory()) { + throw new Error('sourceDir is not a directory'); + } + // Create the tool dir + const destPath = yield _createToolPath(tool, version, arch); + // copy each child item. do not move. move can fail on Windows + // due to anti-virus software having an open handle on a file. + for (const itemName of fs.readdirSync(sourceDir)) { + const s = path.join(sourceDir, itemName); + yield io.cp(s, destPath, { recursive: true }); + } + // write .complete + _completeToolPath(tool, version, arch); + return destPath; + }); +} +exports.cacheDir = cacheDir; +/** + * Caches a downloaded file (GUID) and installs it + * into the tool cache with a given targetName + * + * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid. + * @param targetFile the name of the file name in the tools directory + * @param tool tool name + * @param version version of the tool. semver format + * @param arch architecture of the tool. Optional. Defaults to machine architecture + */ +function cacheFile(sourceFile, targetFile, tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { + version = semver.clean(version) || version; + arch = arch || os.arch(); + core.debug(`Caching tool ${tool} ${version} ${arch}`); + core.debug(`source file: ${sourceFile}`); + if (!fs.statSync(sourceFile).isFile()) { + throw new Error('sourceFile is not a file'); + } + // create the tool dir + const destFolder = yield _createToolPath(tool, version, arch); + // copy instead of move. move can fail on Windows due to + // anti-virus software having an open handle on a file. + const destPath = path.join(destFolder, targetFile); + core.debug(`destination file ${destPath}`); + yield io.cp(sourceFile, destPath); + // write .complete + _completeToolPath(tool, version, arch); + return destFolder; + }); +} +exports.cacheFile = cacheFile; +/** + * Finds the path to a tool version in the local installed tool cache + * + * @param toolName name of the tool + * @param versionSpec version of the tool + * @param arch optional arch. 
defaults to arch of computer + */ +function find(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error('toolName parameter is required'); + } + if (!versionSpec) { + throw new Error('versionSpec parameter is required'); + } + arch = arch || os.arch(); + // attempt to resolve an explicit version + if (!isExplicitVersion(versionSpec)) { + const localVersions = findAllVersions(toolName, arch); + const match = evaluateVersions(localVersions, versionSpec); + versionSpec = match; + } + // check for the explicit version in the cache + let toolPath = ''; + if (versionSpec) { + versionSpec = semver.clean(versionSpec) || ''; + const cachePath = path.join(_getCacheDirectory(), toolName, versionSpec, arch); + core.debug(`checking cache: ${cachePath}`); + if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { + core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } + else { + core.debug('not found'); + } + } + return toolPath; +} +exports.find = find; +/** + * Finds the paths to all versions of a tool that are installed in the local tool cache + * + * @param toolName name of the tool + * @param arch optional arch. defaults to arch of computer + */ +function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os.arch(); + const toolPath = path.join(_getCacheDirectory(), toolName); + if (fs.existsSync(toolPath)) { + const children = fs.readdirSync(toolPath); + for (const child of children) { + if (isExplicitVersion(child)) { + const fullPath = path.join(toolPath, child, arch || ''); + if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { + versions.push(child); + } + } + } + } + return versions; +} +exports.findAllVersions = findAllVersions; +function getManifestFromRepo(owner, repo, auth, branch = 'master') { + return __awaiter(this, void 0, void 0, function* () { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient('tool-cache'); + const headers = {}; + if (auth) { + core.debug('set auth'); + headers.authorization = auth; + } + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; + } + let manifestUrl = ''; + for (const item of response.result.tree) { + if (item.path === 'versions-manifest.json') { + manifestUrl = item.url; + break; + } + } + headers['accept'] = 'application/vnd.github.VERSION.raw'; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + // shouldn't be needed but protects against invalid json saved with BOM + versionsRaw = versionsRaw.replace(/^\uFEFF/, ''); + try { + releases = JSON.parse(versionsRaw); + } + catch (_a) { + core.debug('Invalid json'); + } + } + return releases; + }); +} +exports.getManifestFromRepo = getManifestFromRepo; +function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // wrap the internal impl + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); +} +exports.findFromManifest = findFromManifest; +function _createExtractFolder(dest) { + return __awaiter(this, void 0, void 0, function* () { + if (!dest) { + // create a temp dir + dest = path.join(_getTempDirectory(), v4_1.default()); + } + yield io.mkdirP(dest); + return dest; + }); +} +function _createToolPath(tool, version, arch) { + return __awaiter(this, void 0, void 0, function* () { 
+ const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + core.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io.rmRF(folderPath); + yield io.rmRF(markerPath); + yield io.mkdirP(folderPath); + return folderPath; + }); +} +function _completeToolPath(tool, version, arch) { + const folderPath = path.join(_getCacheDirectory(), tool, semver.clean(version) || version, arch || ''); + const markerPath = `${folderPath}.complete`; + fs.writeFileSync(markerPath, ''); + core.debug('finished caching tool'); +} +/** + * Check if version string is explicit + * + * @param versionSpec version string to check + */ +function isExplicitVersion(versionSpec) { + const c = semver.clean(versionSpec) || ''; + core.debug(`isExplicit: ${c}`); + const valid = semver.valid(c) != null; + core.debug(`explicit? ${valid}`); + return valid; +} +exports.isExplicitVersion = isExplicitVersion; +/** + * Get the highest satisfiying semantic version in `versions` which satisfies `versionSpec` + * + * @param versions array of versions to evaluate + * @param versionSpec semantic version spec to satisfy + */ +function evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; +} +exports.evaluateVersions = evaluateVersions; +/** + * Gets RUNNER_TOOL_CACHE + */ +function _getCacheDirectory() { + const cacheDirectory = process.env['RUNNER_TOOL_CACHE'] || ''; + assert_1.ok(cacheDirectory, 'Expected RUNNER_TOOL_CACHE to be defined'); + return cacheDirectory; +} +/** + * Gets RUNNER_TEMP + */ +function _getTempDirectory() { + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert_1.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + return tempDirectory; +} +/** + * Gets a global variable + */ +function _getGlobal(key, defaultValue) { + /* eslint-disable @typescript-eslint/no-explicit-any */ + const value = global[key]; + /* eslint-enable @typescript-eslint/no-explicit-any */ + return value !== undefined ? value : defaultValue; +} +/** + * Returns an array of unique values. + * @param values Values to make unique. 
+ */ +function _unique(values) { + return Array.from(new Set(values)); +} +//# sourceMappingURL=tool-cache.js.map -module.exports = Busboy -module.exports["default"] = Busboy -module.exports.Busboy = Busboy - -module.exports.Dicer = Dicer - - -/***/ }), - -/***/ 90415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -// TODO: -// * support 1 nested multipart level -// (see second multipart example here: -// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) -// * support limits.fieldNameSize -// -- this will require modifications to utils.parseParams +/***/ }), -const { Readable } = __nccwpck_require__(84492) -const { inherits } = __nccwpck_require__(47261) +/***/ 70562: +/***/ ((module, exports) => { -const Dicer = __nccwpck_require__(2856) +exports = module.exports = SemVer -const parseParams = __nccwpck_require__(34426) -const decodeText = __nccwpck_require__(99136) -const basename = __nccwpck_require__(60496) -const getLimit = __nccwpck_require__(49692) +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} -const RE_BOUNDARY = /^boundary$/i -const RE_FIELD = /^form-data$/i -const RE_CHARSET = /^charset$/i -const RE_FILENAME = /^filename$/i -const RE_NAME = /^name$/i +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' -Multipart.detect = /^multipart\/form-data/i -function Multipart (boy, cfg) { - let i - let len - const self = this - let boundary - const limits = cfg.limits - const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) - const parsedConType = cfg.parsedConType || [] - const defCharset = cfg.defCharset || 'utf8' - const preservePath = cfg.preservePath - const fileOpts = { highWaterMark: cfg.fileHwm } +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 - for (i = 0, len = parsedConType.length; i < len; ++i) { - if (Array.isArray(parsedConType[i]) && - RE_BOUNDARY.test(parsedConType[i][0])) { - boundary = parsedConType[i][1] - break - } - } +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16 - function checkFinished () { - if (nends === 0 && finished && !boy._done) { - finished = false - self.end() - } - } +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 - if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 - const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) - const filesLimit = getLimit(limits, 'files', Infinity) - const fieldsLimit = getLimit(limits, 'fields', Infinity) - const partsLimit = getLimit(limits, 'parts', Infinity) - const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) - const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) +function tok (n) { + t[n] = R++ +} - let nfiles = 0 - let nfields = 0 - let nends = 0 - let curFile - let curField - let finished = false +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' - this._needDrain = false - this._pause = false - this._cb = undefined - this._nparts = 0 - this._boy = boy +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] - const parserCfg = { - boundary, - maxHeaderPairs: headerPairsLimit, - maxHeaderSize: headerSizeLimit, - partHwm: fileOpts.highWaterMark, - highWaterMark: cfg.highWaterMark +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') } + return value +} - this.parser = new Dicer(parserCfg) - this.parser.on('drain', function () { - self._needDrain = false - if (self._cb && !self._pause) { - const cb = self._cb - self._cb = undefined - cb() - } - }).on('part', function onPart (part) { - if (++self._nparts > partsLimit) { - self.parser.removeListener('part', onPart) - self.parser.on('part', skipPart) - boy.hitPartsLimit = true - boy.emit('partsLimit') - return skipPart(part) - } +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. - // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let - // us emit 'end' early since we know the part has ended if we are already - // seeing the next part - if (curField) { - const field = curField - field.emit('end') - field.removeAllListeners('end') - } +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. 
- part.on('header', function (header) { - let contype - let fieldname - let parsed - let charset - let encoding - let filename - let nsize = 0 +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' - if (header['content-type']) { - parsed = parseParams(header['content-type'][0]) - if (parsed[0]) { - contype = parsed[0].toLowerCase() - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_CHARSET.test(parsed[i][0])) { - charset = parsed[i][1].toLowerCase() - break - } - } - } - } +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. - if (contype === undefined) { contype = 'text/plain' } - if (charset === undefined) { charset = defCharset } +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' - if (header['content-disposition']) { - parsed = parseParams(header['content-disposition'][0]) - if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_NAME.test(parsed[i][0])) { - fieldname = parsed[i][1] - } else if (RE_FILENAME.test(parsed[i][0])) { - filename = parsed[i][1] - if (!preservePath) { filename = basename(filename) } - } - } - } else { return skipPart(part) } +// ## Main Version +// Three dot-separated numeric identifiers. - if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' - let onData, - onEnd +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' - if (isPartAFile(fieldname, contype, filename)) { - // file/binary field - if (nfiles === filesLimit) { - if (!boy.hitFilesLimit) { - boy.hitFilesLimit = true - boy.emit('filesLimit') - } - return skipPart(part) - } +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. - ++nfiles +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' - if (!boy._events.file) { - self.parser._ignore() - return - } +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' - ++nends - const file = new FileStream(fileOpts) - curFile = file - file.on('end', function () { - --nends - self._pause = false - checkFinished() - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - }) - file._read = function (n) { - if (!self._pause) { return } - self._pause = false - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - } - boy.emit('file', fieldname, file, filename, encoding, contype) +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. 
- onData = function (data) { - if ((nsize += data.length) > fileSizeLimit) { - const extralen = fileSizeLimit - nsize + data.length - if (extralen > 0) { file.push(data.slice(0, extralen)) } - file.truncated = true - file.bytesRead = fileSizeLimit - part.removeAllListeners('data') - file.emit('limit') - return - } else if (!file.push(data)) { self._pause = true } +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' - file.bytesRead = nsize - } +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' - onEnd = function () { - curFile = undefined - file.push(null) - } - } else { - // non-file field - if (nfields === fieldsLimit) { - if (!boy.hitFieldsLimit) { - boy.hitFieldsLimit = true - boy.emit('fieldsLimit') - } - return skipPart(part) - } +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. - ++nfields - ++nends - let buffer = '' - let truncated = false - curField = part +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' - onData = function (data) { - if ((nsize += data.length) > fieldSizeLimit) { - const extralen = (fieldSizeLimit - (nsize - data.length)) - buffer += data.toString('binary', 0, extralen) - truncated = true - part.removeAllListeners('data') - } else { buffer += data.toString('binary') } - } +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. - onEnd = function () { - curField = undefined - if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } - boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) - --nends - checkFinished() - } - } +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' - /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become - broken. Streams2/streams3 is a huge black box of confusion, but - somehow overriding the sync state seems to fix things again (and still - seems to work for previous node versions). - */ - part._readableState.sync = false +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. - part.on('data', onData) - part.on('end', onEnd) - }).on('error', function (err) { - if (curFile) { curFile.emit('error', err) } - }) - }).on('error', function (err) { - boy.emit('error', err) - }).on('finish', function () { - finished = true - checkFinished() - }) -} +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. -Multipart.prototype.write = function (chunk, cb) { - const r = this.parser.write(chunk) - if (r && !this._pause) { - cb() - } else { - this._needDrain = !r - this._cb = cb - } -} +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' -Multipart.prototype.end = function () { - const self = this +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' - if (self.parser.writable) { - self.parser.end() - } else if (!self._boy._done) { - process.nextTick(function () { - self._boy._done = true - self._boy.emit('finish') - }) - } -} +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. 
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' -function skipPart (part) { - part.resume() -} +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' -function FileStream (opts) { - Readable.call(this, opts) +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' - this.bytesRead = 0 +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' - this.truncated = false -} +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' -inherits(FileStream, Readable) +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' -FileStream.prototype._read = function (n) {} +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' -module.exports = Multipart +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' -/***/ }), +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' -/***/ 16780: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' -"use strict"; +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' -const Decoder = __nccwpck_require__(89730) -const decodeText = __nccwpck_require__(99136) -const getLimit = __nccwpck_require__(49692) +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' -const RE_CHARSET = /^charset$/i +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' -UrlEncoded.detect = /^application\/x-www-form-urlencoded/i -function UrlEncoded (boy, cfg) { - const limits = cfg.limits - const parsedConType = cfg.parsedConType - this.boy = boy +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' - this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) - this.fieldsLimit = getLimit(limits, 'fields', Infinity) +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' - let charset - for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var - if (Array.isArray(parsedConType[i]) && - RE_CHARSET.test(parsedConType[i][0])) { - charset = parsedConType[i][1].toLowerCase() - break - } - } +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' - if (charset === undefined) { charset = cfg.defCharset || 'utf8' } +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' - this.decoder = new Decoder() - this.charset = charset - this._fields = 0 - this._state = 'key' - this._checkingBytes = true - this._bytesKey = 0 - this._bytesVal = 0 - this._key = '' - this._val = '' - this._keyTrunc = false - this._valTrunc = false - this._hitLimit = false +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. 
A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) + } } -UrlEncoded.prototype.write = function (data, cb) { - if (this._fields === this.fieldsLimit) { - if (!this.boy.hitFieldsLimit) { - this.boy.hitFieldsLimit = true - this.boy.emit('fieldsLimit') +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - return cb() } - let idxeq; let idxamp; let i; let p = 0; const len = data.length - - while (p < len) { - if (this._state === 'key') { - idxeq = idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x3D/* = */) { - idxeq = i - break - } else if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesKey } - } - - if (idxeq !== undefined) { - // key with assignment - if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } - this._state = 'val' + if (version instanceof SemVer) { + return version + } - this._hitLimit = false - this._checkingBytes = true - this._val = '' - this._bytesVal = 0 - this._valTrunc = false - this.decoder.reset() + if (typeof version !== 'string') { + return null + } - p = idxeq + 1 - } else if (idxamp !== undefined) { - // key with no assignment - ++this._fields - let key; const keyTrunc = this._keyTrunc - if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + if (version.length > MAX_LENGTH) { + return null + } - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } - if (key.length) { - this.boy.emit('field', decodeText(key, 'binary', this.charset), - '', - keyTrunc, - false) - } + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._keyTrunc = true - } - } else { - if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } - p = len - } - } else { - idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesVal } - } +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? 
v.version : null +} - if (idxamp !== undefined) { - ++this._fields - if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - this._state = 'key' +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() +exports.SemVer = SemVer - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._val === '' && this.fieldSizeLimit === 0) || - (this._bytesVal = this._val.length) === this.fieldSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._valTrunc = true - } - } else { - if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } - p = len - } +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } } - cb() -} - -UrlEncoded.prototype.end = function () { - if (this.boy._done) { return } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } - if (this._state === 'key' && this._key.length > 0) { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - '', - this._keyTrunc, - false) - } else if (this._state === 'val') { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') } - this.boy._done = true - this.boy.emit('finish') -} -module.exports = UrlEncoded + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose -/***/ }), + var m = version.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]) -/***/ 89730: -/***/ ((module) => { + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } -"use strict"; + this.raw = version + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] -const RE_PLUS = /\+/g + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } -const HEX = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -] + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } -function Decoder () { - this.buffer = undefined -} -Decoder.prototype.write = function (str) { - // Replace '+' with ' ' before decoding - str = str.replace(RE_PLUS, ' ') - let res = '' - let i = 0; let p = 0; const len = str.length - for (; i < len; ++i) { - if (this.buffer !== undefined) { - if (!HEX[str.charCodeAt(i)]) { - res += '%' + this.buffer - this.buffer = undefined - --i // retry character - } else { - this.buffer += str[i] - ++p - if (this.buffer.length === 2) { - res += String.fromCharCode(parseInt(this.buffer, 16)) - this.buffer = undefined + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num } } - } else if (str[i] === '%') { - if (i > p) { - res += str.substring(p, i) - p = i - } - this.buffer = '' - ++p - } + return id + }) } - if (p < len && this.buffer === undefined) { res += str.substring(p) } - return res + + this.build = m[5] ? m[5].split('.') : [] + this.format() } -Decoder.prototype.reset = function () { - this.buffer = undefined + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version } -module.exports = Decoder +SemVer.prototype.toString = function () { + return this.version +} +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/***/ }), + return this.compareMain(other) || this.comparePre(other) +} -/***/ 60496: -/***/ ((module) => { +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -"use strict"; + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -module.exports = function basename (path) { - if (typeof path !== 'string') { return '' } - for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var - switch (path.charCodeAt(i)) { - case 0x2F: // '/' - case 0x5C: // '\' - path = path.slice(i + 1) - return (path === '..' || path === '.' 
? '' : path) - } + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 } - return (path === '..' || path === '.' ? '' : path) + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) } +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -/***/ }), + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} -/***/ 99136: -/***/ ((module) => { +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break -"use strict"; + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
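// --- Illustration (editorial sketch, not part of the vendored semver source) ---
// A small sketch of how the increment rules above behave through the public
// API, assuming `require('semver')` resolves to this library.
const semver = require('semver')
semver.inc('1.2.0', 'patch')                     // '1.2.1'
semver.inc('1.2.0-5', 'patch')                   // '1.2.0'  (a pre-release bumps up to the same patch)
semver.inc('1.0.0-5', 'major')                   // '1.0.0'  (a pre-major bumps up to the same major)
semver.inc('1.2.3', 'prerelease', 'beta')        // '1.2.4-beta.0' (non-prerelease input acts like prepatch)
semver.inc('1.2.0-beta.1', 'prerelease', 'beta') // '1.2.0-beta.2'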
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } -// Node has always utf-8 -const utf8Decoder = new TextDecoder('utf-8') -const textDecoders = new Map([ - ['utf-8', utf8Decoder], - ['utf8', utf8Decoder] -]) + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} -function decodeText (text, textEncoding, destEncoding) { - if (text) { - if (textDecoders.has(destEncoding)) { - try { - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } - } else { - try { - textDecoders.set(destEncoding, new TextDecoder(destEncoding)) - return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) - } catch (e) { } +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } } + return defaultResult // may be undefined } - return text } -module.exports = decodeText - +exports.compareIdentifiers = compareIdentifiers -/***/ }), +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) -/***/ 49692: -/***/ ((module) => { + if (anum && bnum) { + a = +a + b = +b + } -"use strict"; + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} -module.exports = function getLimit (limits, name, defaultLimit) { - if ( - !limits || - limits[name] === undefined || - limits[name] === null - ) { return defaultLimit } +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} - if ( - typeof limits[name] !== 'number' || - isNaN(limits[name]) - ) { throw new TypeError('Limit ' + name + ' is not a valid number') } +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} - return limits[name] +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch } +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} -/***/ }), +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} -/***/ 34426: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} -"use strict"; +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} -const decodeText = __nccwpck_require__(99136) +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} -const RE_ENCODED = /%([a-fA-F0-9]{2})/g +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} -function encodedReplacer (match, byte) { - return String.fromCharCode(parseInt(byte, 16)) +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 } -function parseParams (str) { - const res = [] - let state = 'key' - let charset = '' - let inquote = false - let escaping = false - let p = 0 - let tmp = '' +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} - for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var - const char = str[i] - if (char === '\\' && inquote) { - if (escaping) { escaping = false } else { - escaping = true - continue - } - } else if (char === '"') { - if (!escaping) { - if (inquote) { - inquote = false - state = 'key' - } else { inquote = true } - continue - } else { escaping = false } - } else { - if (escaping && inquote) { tmp += '\\' } - escaping = false - if ((state === 'charset' || state === 'lang') && char === "'") { - if (state === 'charset') { - state = 'lang' - charset = tmp.substring(1) - } else { state = 'value' } - tmp = '' - continue - } else if (state === 'key' && - (char === '*' || char === '=') && - res.length) { - if (char === '*') { state = 'charset' } else { state = 'value' } - res[p] = [tmp, undefined] - tmp = '' - continue - } else if (!inquote && char === ';') { - state = 'key' - if (charset) { - if (tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } - charset = '' - } else if (tmp.length) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } - tmp = '' - ++p - continue - } else if (!inquote && (char === ' ' || char === '\t')) { 
continue } - } - tmp += char - } - if (charset && tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } else if (tmp) { - tmp = decodeText(tmp, 'binary', 'utf8') - } +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} - if (res[p] === undefined) { - if (tmp) { res[p] = tmp } - } else { res[p][1] = tmp } +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} - return res +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 } -module.exports = parseParams +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -/***/ }), + case '': + case '=': + case '==': + return eq(a, b, loose) -/***/ 46412: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + case '!=': + return neq(a, b, loose) -"use strict"; + case '>': + return gt(a, b, loose) -/*! - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ResourceStream = exports.paginator = exports.Paginator = void 0; -/*! - * @module common/paginator - */ -const arrify = __nccwpck_require__(61546); -const extend = __nccwpck_require__(38171); -const resource_stream_1 = __nccwpck_require__(72199); -Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); -/*! Developer Documentation - * - * paginator is used to auto-paginate `nextQuery` methods as well as - * streamifying them. - * - * Before: - * - * search.query('done=true', function(err, results, nextQuery) { - * search.query(nextQuery, function(err, results, nextQuery) {}); - * }); - * - * After: - * - * search.query('done=true', function(err, results) {}); - * - * Methods to extend should be written to accept callbacks and return a - * `nextQuery`. - */ -class Paginator { - /** - * Cache the original method, then overwrite it on the Class's prototype. - * - * @param {function} Class - The parent class of the methods to extend. - * @param {string|string[]} methodNames - Name(s) of the methods to extend. 
- */ - // tslint:disable-next-line:variable-name - extend(Class, methodNames) { - methodNames = arrify(methodNames); - methodNames.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - // map the original method to a private member - Class.prototype[methodName + '_'] = originalMethod; - // overwrite the original to auto-paginate - /* eslint-disable @typescript-eslint/no-explicit-any */ - Class.prototype[methodName] = function (...args) { - const parsedArguments = paginator.parseArguments_(args); - return paginator.run_(parsedArguments, originalMethod.bind(this)); - }; - }); - } - /** - * Wraps paginated API calls in a readable object stream. - * - * This method simply calls the nextQuery recursively, emitting results to a - * stream. The stream ends when `nextQuery` is null. - * - * `maxResults` will act as a cap for how many results are fetched and emitted - * to the stream. - * - * @param {string} methodName - Name of the method to streamify. - * @return {function} - Wrapped function. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - streamify(methodName) { - return function ( - /* eslint-disable @typescript-eslint/no-explicit-any */ - ...args) { - const parsedArguments = paginator.parseArguments_(args); - const originalMethod = this[methodName + '_'] || this[methodName]; - return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); - }; - } - /** - * Parse a pseudo-array `arguments` for a query and callback. - * - * @param {array} args - The original `arguments` pseduo-array that the original - * method received. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - parseArguments_(args) { - let query; - let autoPaginate = true; - let maxApiCalls = -1; - let maxResults = -1; - let callback; - const firstArgument = args[0]; - const lastArgument = args[args.length - 1]; - if (typeof firstArgument === 'function') { - callback = firstArgument; - } - else { - query = firstArgument; - } - if (typeof lastArgument === 'function') { - callback = lastArgument; - } - if (typeof query === 'object') { - query = extend(true, {}, query); - // Check if the user only asked for a certain amount of results. - if (query.maxResults && typeof query.maxResults === 'number') { - // `maxResults` is used API-wide. - maxResults = query.maxResults; - } - else if (typeof query.pageSize === 'number') { - // `pageSize` is Pub/Sub's `maxResults`. - maxResults = query.pageSize; - } - if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { - maxApiCalls = query.maxApiCalls; - delete query.maxApiCalls; - } - // maxResults is the user specified limit. - if (maxResults !== -1 || query.autoPaginate === false) { - autoPaginate = false; - } - } - const parsedArguments = { - query: query || {}, - autoPaginate, - maxApiCalls, - maxResults, - callback, - }; - parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); - delete parsedArguments.streamOptions.autoPaginate; - delete parsedArguments.streamOptions.maxResults; - delete parsedArguments.streamOptions.pageSize; - return parsedArguments; - } - /** - * This simply checks to see if `autoPaginate` is set or not, if it's true - * then we buffer all results, otherwise simply call the original method. - * - * @param {array} parsedArguments - Parsed arguments from the original method - * call. - * @param {object=|string=} parsedArguments.query - Query object. This is most - * commonly an object, but to make the API more simple, it can also be a - * string in some places. 
- * @param {function=} parsedArguments.callback - Callback function. - * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. - * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. - * @param {number} parsedArguments.maxResults - Maximum results to return. - * @param {function} originalMethod - The cached method that accepts a callback - * and returns `nextQuery` to receive more results. - */ - run_(parsedArguments, originalMethod) { - const query = parsedArguments.query; - const callback = parsedArguments.callback; - if (!parsedArguments.autoPaginate) { - return originalMethod(query, callback); - } - const results = new Array(); - let otherArgs = []; - const promise = new Promise((resolve, reject) => { - const stream = paginator.runAsStream_(parsedArguments, originalMethod); - stream - .on('error', reject) - .on('data', (data) => results.push(data)) - .on('end', () => { - otherArgs = stream._otherArgs || []; - resolve(results); - }); - }); - if (!callback) { - return promise.then(results => [results, query, ...otherArgs]); - } - promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - /** - * This method simply calls the nextQuery recursively, emitting results to a - * stream. The stream ends when `nextQuery` is null. - * - * `maxResults` will act as a cap for how many results are fetched and emitted - * to the stream. - * - * @param {object=|string=} parsedArguments.query - Query object. This is most - * commonly an object, but to make the API more simple, it can also be a - * string in some places. - * @param {function=} parsedArguments.callback - Callback function. - * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. - * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. - * @param {number} parsedArguments.maxResults - Maximum results to return. - * @param {function} originalMethod - The cached method that accepts a callback - * and returns `nextQuery` to receive more results. - * @return {stream} - Readable object stream. - */ - /* eslint-disable @typescript-eslint/no-explicit-any */ - runAsStream_(parsedArguments, originalMethod) { - return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) } -exports.Paginator = Paginator; -const paginator = new Paginator(); -exports.paginator = paginator; -//# sourceMappingURL=index.js.map -/***/ }), +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) -/***/ 72199: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } -"use strict"; + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } -/*! - * Copyright 2019 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ResourceStream = void 0; -const stream_1 = __nccwpck_require__(12781); -class ResourceStream extends stream_1.Transform { - constructor(args, requestFn) { - const options = Object.assign({ objectMode: true }, args.streamOptions); - super(options); - this._ended = false; - this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; - this._nextQuery = args.query; - this._reading = false; - this._requestFn = requestFn; - this._requestsMade = 0; - this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; - this._otherArgs = []; - } - /* eslint-disable @typescript-eslint/no-explicit-any */ - end(...args) { - this._ended = true; - return super.end(...args); - } - _read() { - if (this._reading) { - return; - } - this._reading = true; - // Wrap in a try/catch to catch input linting errors, e.g. - // an invalid BigQuery query. These errors are thrown in an - // async fashion, which makes them un-catchable by the user. - try { - this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { - if (err) { - this.destroy(err); - return; - } - this._otherArgs = otherArgs; - this._nextQuery = nextQuery; - if (this._resultsToSend !== Infinity) { - results = results.splice(0, this._resultsToSend); - this._resultsToSend -= results.length; - } - let more = true; - for (const result of results) { - if (this._ended) { - break; - } - more = this.push(result); - } - const isFinished = !this._nextQuery || this._resultsToSend < 1; - const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; - if (isFinished || madeMaxCalls) { - this.end(); - } - if (more && !this._ended) { - setImmediate(() => this._read()); - } - this._reading = false; - }); - } - catch (e) { - this.destroy(e); - } - } + // if it literally is just '>' or '' then allow anything. 
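// --- Illustration (editorial sketch, not part of the vendored semver source) ---
// Sketch of how a single comparator parses and tests, assuming require('semver').
const semver = require('semver')
const c = new semver.Comparator('>=1.2.7')
c.test('1.2.8')                          // true
c.test('1.2.6')                          // false
new semver.Comparator('=1.2.3').value    // '1.2.3' (the '=' operator is normalized away)
new semver.Comparator('').test('0.0.1')  // true    (an empty comparator matches any version)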
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } } -exports.ResourceStream = ResourceStream; -//# sourceMappingURL=resource-stream.js.map -/***/ }), +Comparator.prototype.toString = function () { + return this.value +} -/***/ 3497: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) -"use strict"; + if (this.semver === ANY || version === ANY) { + return true + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; -const stream_1 = __nccwpck_require__(12781); -// Copyright 2014 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -/** - * Populate the `{{projectId}}` placeholder. - * - * @throws {Error} If a projectId is required, but one is not provided. - * - * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. - * @param {string} projectId - A projectId. If not provided - * @return {*} - The original argument with all placeholders populated. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function replaceProjectIdToken(value, projectId) { - if (Array.isArray(value)) { - value = value.map(v => replaceProjectIdToken(v, projectId)); + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false } - if (value !== null && - typeof value === 'object' && - !(value instanceof Buffer) && - !(value instanceof stream_1.Stream) && - typeof value.hasOwnProperty === 'function') { - for (const opt in value) { - // eslint-disable-next-line no-prototype-builtins - if (value.hasOwnProperty(opt)) { - value[opt] = replaceProjectIdToken(value[opt], projectId); - } - } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - if (typeof value === 'string' && - value.indexOf('{{projectId}}') > -1) { - if (!projectId || projectId === '{{projectId}}') { - throw new MissingProjectIdError(); - } - value = value.replace(/{{projectId}}/g, projectId); + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true } - return value; -} -exports.replaceProjectIdToken = replaceProjectIdToken; -/** - * Custom error type for missing project ID errors. - */ -class MissingProjectIdError extends Error { - constructor() { - super(...arguments); - this.message = `Sorry, we cannot connect to Cloud Services without a project - ID. 
You may specify one with an environment variable named - "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan } -exports.MissingProjectIdError = MissingProjectIdError; -//# sourceMappingURL=index.js.map -/***/ }), +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } -/***/ 19203: -/***/ ((__unused_webpack_module, exports) => { + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } -"use strict"; + if (range instanceof Comparator) { + return new Range(range.value, options) + } -/* eslint-disable prefer-rest-params */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; -/** - * Wraps a callback style function to conditionally return a promise. - * - * @param {function} originalMethod - The method to promisify. - * @param {object=} options - Promise options. - * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. - * @return {function} wrapped - */ -function promisify(originalMethod, options) { - if (originalMethod.promisified_) { - return originalMethod; - } - options = options || {}; - const slice = Array.prototype.slice; - // tslint:disable-next-line:no-any - const wrapper = function () { - let last; - for (last = arguments.length - 1; last >= 0; last--) { - const arg = arguments[last]; - if (typeof arg === 'undefined') { - continue; // skip trailing undefined. - } - if (typeof arg !== 'function') { - break; // non-callback last argument found. - } - return originalMethod.apply(this, arguments); - } - // peel trailing undefined. - const args = slice.call(arguments, 0, last + 1); - // tslint:disable-next-line:variable-name - let PromiseCtor = Promise; - // Because dedupe will likely create a single install of - // @google-cloud/common to be shared amongst all modules, we need to - // localize it at the Service level. 
- if (this && this.Promise) { - PromiseCtor = this.Promise; - } - return new PromiseCtor((resolve, reject) => { - // tslint:disable-next-line:no-any - args.push((...args) => { - const callbackArgs = slice.call(args); - const err = callbackArgs.shift(); - if (err) { - return reject(err); - } - if (options.singular && callbackArgs.length === 1) { - resolve(callbackArgs[0]); - } - else { - resolve(callbackArgs); - } - }); - originalMethod.apply(this, args); - }); - }; - wrapper.promisified_ = true; - return wrapper; -} -exports.promisify = promisify; -/** - * Promisifies certain Class methods. This will not promisify private or - * streaming methods. - * - * @param {module:common/service} Class - Service class. - * @param {object=} options - Configuration object. - */ -// tslint:disable-next-line:variable-name -function promisifyAll(Class, options) { - const exclude = (options && options.exclude) || []; - const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); - const methods = ownPropertyNames.filter(methodName => { - // clang-format off - return (!exclude.includes(methodName) && - typeof Class.prototype[methodName] === 'function' && // is it a function? - !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? - ); - // clang-format on - }); - methods.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - if (!originalMethod.promisified_) { - Class.prototype[methodName] = exports.promisify(originalMethod, options); - } - }); -} -exports.promisifyAll = promisifyAll; -/** - * Wraps a promisy type function to conditionally call a callback function. - * - * @param {function} originalMethod - The method to callbackify. - * @param {object=} options - Callback options. - * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. - * @return {function} wrapped - */ -function callbackify(originalMethod) { - if (originalMethod.callbackified_) { - return originalMethod; - } - // tslint:disable-next-line:no-any - const wrapper = function () { - if (typeof arguments[arguments.length - 1] !== 'function') { - return originalMethod.apply(this, arguments); - } - const cb = Array.prototype.pop.call(arguments); - originalMethod.apply(this, arguments).then( - // tslint:disable-next-line:no-any - (res) => { - res = Array.isArray(res) ? res : [res]; - cb(null, ...res); - }, (err) => cb(err)); - }; - wrapper.callbackified_ = true; - return wrapper; -} -exports.callbackify = callbackify; -/** - * Callbackifies certain Class methods. This will not callbackify private or - * streaming methods. - * - * @param {module:common/service} Class - Service class. - * @param {object=} options - Configuration object. - */ -function callbackifyAll( -// tslint:disable-next-line:variable-name -Class, options) { - const exclude = (options && options.exclude) || []; - const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); - const methods = ownPropertyNames.filter(methodName => { - // clang-format off - return (!exclude.includes(methodName) && - typeof Class.prototype[methodName] === 'function' && // is it a function? - !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
- ); - // clang-format on - }); - methods.forEach(methodName => { - const originalMethod = Class.prototype[methodName]; - if (!originalMethod.callbackified_) { - Class.prototype[methodName] = exports.callbackify(originalMethod); - } - }); -} -exports.callbackifyAll = callbackifyAll; -//# sourceMappingURL=index.js.map - -/***/ }), + if (!(this instanceof Range)) { + return new Range(range, options) + } -/***/ 44458: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease -"use strict"; + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(33542)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(29411)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(83424)); -var _v4 = _interopRequireDefault(__nccwpck_require__(64051)); + this.format() +} -var _nil = _interopRequireDefault(__nccwpck_require__(46570)); +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} -var _version = _interopRequireDefault(__nccwpck_require__(35611)); +Range.prototype.toString = function () { + return this.range +} -var _validate = _interopRequireDefault(__nccwpck_require__(20937)); +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) -var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) -var _parse = _interopRequireDefault(__nccwpck_require__(99645)); + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + // normalize spaces + range = range.split(/\s+/).join(' ') -/***/ }), + // At this point, the range is completely trimmed and + // ready to be split into comparators. -/***/ 84953: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) -"use strict"; + return set +} +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); + testComparator = remainingComparators.pop() } - return _crypto.default.createHash('md5').update(bytes).digest(); + return result } -var _default = md5; -exports["default"] = _default; - -/***/ }), +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} -/***/ 46570: -/***/ ((__unused_webpack_module, exports) => { +// comprised of xranges, tildes, stars, and gtlt's at this point. 
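// --- Illustration (editorial sketch, not part of the vendored semver source) ---
// Sketch of how a range string splits into comparator sets ('||' separates sets)
// and how two ranges are checked for overlap, assuming require('semver').
const semver = require('semver')
semver.toComparators('1.2.3 - 2.3.4')      // [ [ '>=1.2.3', '<=2.3.4' ] ]
semver.toComparators('^1.2.3 || >=2.5.0')  // [ [ '>=1.2.3', '<2.0.0' ], [ '>=2.5.0' ] ]
const a = new semver.Range('>=1.2.3 <2.0.0')
a.intersects(new semver.Range('^1.5.0'))   // true
a.intersects(new semver.Range('^2.0.0'))   // false (2.0.0 itself is excluded by '<2.0.0')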
+// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} -"use strict"; +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret -/***/ }), + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } -/***/ 99645: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + debug('tilde return', ret) + return ret + }) +} -"use strict"; +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' 
+ (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } -var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + debug('caret return', ret) + return ret + }) +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + if (gtlt === '=' && anyX) { + gtlt = '' + } - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } -var _default = parse; -exports["default"] = _default; + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } -/***/ }), + debug('xRange return', ret) -/***/ 94323: -/***/ ((__unused_webpack_module, exports) => { + return ret + }) +} -"use strict"; +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! 
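// --- Illustration (editorial sketch, not part of the vendored semver source) ---
// Sketch of how tilde, caret, x-range and star expressions desugar into plain
// comparators, assuming require('semver').
const semver = require('semver')
semver.validRange('~1.2.3')      // '>=1.2.3 <1.3.0'
semver.validRange('^1.2.3')      // '>=1.2.3 <2.0.0'
semver.validRange('1.2.x')       // '>=1.2.0 <1.3.0'
semver.validRange('*')           // '*'
semver.validRange('not a range') // null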
+ return comp.trim().replace(safeRe[t.STAR], '') +} +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } -/***/ }), + return (from + ' ' + to).trim() +} -/***/ 91430: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } -"use strict"; + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } -let poolPtr = rnds8Pool.length; + return true +} -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} - poolPtr = 0; +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} - return rnds8Pool.slice(poolPtr, poolPtr += 16); +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min } -/***/ }), +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) -/***/ 77416: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } -"use strict"; + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + if (minver && range.test(minver)) { + return minver + } -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + return null +} -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. 
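`satisfies`, `maxSatisfying`, `minSatisfying` and `minVersion` above are the range queries a consumer typically reaches for when resolving a version spec against a list of published versions. A short hedged example against the public `semver` API (the version list is made up):

```js
const semver = require('semver');
const available = ['1.2.3', '1.2.9', '1.3.0', '2.0.0-rc.1'];

console.log(semver.maxSatisfying(available, '~1.2.0'));  // '1.2.9'
console.log(semver.minSatisfying(available, '>=1.2.4')); // '1.2.9'
console.log(semver.minVersion('^1.2.3').version);        // '1.2.3'

// Prereleases only match when the comparator itself carries a prerelease,
// or when includePrerelease is set, as testSet() above enforces:
console.log(semver.satisfies('2.0.0-rc.1', '>=1.0.0'));                              // false
console.log(semver.satisfies('2.0.0-rc.1', '>=1.0.0', { includePrerelease: true })); // true
```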
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null } +} - return _crypto.default.createHash('sha1').update(bytes).digest(); +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) } -var _default = sha1; -exports["default"] = _default; +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} -/***/ }), +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) -/***/ 32122: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } -"use strict"; + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + var high = null + var low = null -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true } -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. 
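`ltr`, `gtr` and the shared `outside` helper above answer the complementary question: is a version entirely below, or entirely above, everything a range allows? A hedged sketch with the public `semver` API:

```js
const semver = require('semver');

console.log(semver.ltr('1.0.0', '^1.2.3'));          // true: below everything ^1.2.3 allows
console.log(semver.gtr('2.0.0', '^1.2.3'));          // true: above everything ^1.2.3 allows
console.log(semver.gtr('1.5.0', '^1.2.3'));          // false: 1.5.0 is inside the range
console.log(semver.outside('2.0.0', '^1.2.3', '>')); // same check with an explicit direction
```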
See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version } - return uuid; + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) } -var _default = stringify; -exports["default"] = _default; /***/ }), -/***/ 33542: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7701: +/***/ ((module) => { -"use strict"; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +module.exports = bytesToUuid; -var _rng = _interopRequireDefault(__nccwpck_require__(91430)); -var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ 37269: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. -let _clockseq; // Previous uuid creation time +var crypto = __nccwpck_require__(6113); +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval +/***/ }), +/***/ 17468: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested +var rng = __nccwpck_require__(37269); +var bytesToUuid = __nccwpck_require__(7701); +function v4(options, buf, offset) { + var i = buf && offset || 0; - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; } + options = options || {}; - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + var rnds = options.random || (options.rng || rng)(); - b[i++] = clockseq & 0xff; // `node` + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } } - return buf || (0, _stringify.default)(b); + return buf || bytesToUuid(rnds); } -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 29411: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(74624)); - -var _md = _interopRequireDefault(__nccwpck_require__(84953)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +module.exports = v4; -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; /***/ }), -/***/ 74624: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 4654: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ - value: true +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; })); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); - -var _parse = _interopRequireDefault(__nccwpck_require__(99645)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const debug_1 = __importDefault(__nccwpck_require__(38237)); +const events_1 = __nccwpck_require__(82361); +const agent_base_1 = __nccwpck_require__(70694); +const url_1 = __nccwpck_require__(57310); +const debug = (0, debug_1.default)('http-proxy-agent'); +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + super.addRequest(req, opts); } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? 'https:' : 'http:'; + const hostname = req.getHeader('host') || 'localhost'; + const base = `${protocol}//${hostname}`; + const url = new url_1.URL(req.path, base); + if (opts.port !== 80) { + url.port = String(opts.port); + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = String(url); + // Inject the `Proxy-Authorization` header if necessary. + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } + } + } + async connect(req, opts) { + req._header = null; + if (!req.path.includes('://')) { + this.setRequestProps(req, opts); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + // Create a socket connection to the proxy server. + let socket; + if (this.proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(this.connectOpts); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. 
+ await (0, events_1.once)(socket, 'connect'); + return socket; } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; } - -/***/ }), - -/***/ 83424: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(91430)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; +HttpProxyAgent.protocols = ['http', 'https']; +exports.HttpProxyAgent = HttpProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } } - - return buf; - } - - return (0, _stringify.default)(rnds); + return ret; } - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 64051: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(74624)); - -var _sha = _interopRequireDefault(__nccwpck_require__(77416)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 20937: +/***/ 84100: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(94323)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +Object.defineProperty(exports, "__esModule", ({ value: true })); -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); +var coreRestPipeline = __nccwpck_require__(29146); +var tslib = __nccwpck_require__(4351); +var coreAuth = __nccwpck_require__(98834); +var coreUtil = __nccwpck_require__(80637); +var coreHttpCompat = __nccwpck_require__(25083); +var coreClient = __nccwpck_require__(7611); +var coreXml = __nccwpck_require__(17309); +var logger$1 = __nccwpck_require__(89497); +var abortController = __nccwpck_require__(1753); +var crypto = __nccwpck_require__(6113); +var coreTracing = __nccwpck_require__(19363); +var stream = __nccwpck_require__(12781); +var coreLro = __nccwpck_require__(90334); +var events = __nccwpck_require__(82361); +var fs = __nccwpck_require__(57147); +var util = __nccwpck_require__(73837); +var buffer = __nccwpck_require__(14300); + +function _interopNamespaceDefault(e) { + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n.default = e; + return Object.freeze(n); } -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 35611: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(20937)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat); +var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient); +var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs); +var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util); -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); - return parseInt(uuid.substr(14, 1), 16); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. 
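For reference, the `HttpProxyAgent` bundled above is consumed like any other Node `http.Agent`. A minimal hedged usage sketch (the proxy URL is a placeholder):

```js
// Sketch only; assumes the `http-proxy-agent` package API as vendored above.
const http = require('http');
const { HttpProxyAgent } = require('http-proxy-agent');

// Requests sent through this agent get an absolute req.path and, when the proxy
// URL carries credentials, a Proxy-Authorization header, which is what
// setRequestProps() above takes care of.
const agent = new HttpProxyAgent('http://user:pass@proxy.example.com:3128');
http.get('http://example.com/', { agent }, (res) => {
  console.log('status via proxy:', res.statusCode);
});
```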
+ * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } } -var _default = version; -exports["default"] = _default; - -/***/ }), - -/***/ 40334: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const SDK_VERSION = "12.26.0"; +const SERVICE_VERSION = "2025-01-05"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms +/** + * The OAuth scope to use with Azure Storage. + */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + 
"x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. 
+ * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path || "/"; + path = escape(path); + urlParsed.pathname = path; + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; } - /** - * Prefix token for usage in the Authorization header + * Extracts the parts of an Azure Storage account connection string. * - * @param token OAuth token or JSON Web Token + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + // client constructors assume accountSas does *not* start with ? + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } } - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. 
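The `escapeURLPath` comment above spells out "strategy two": escape only characters that are still unescaped, so callers may pass either an encoded or an un-encoded blob URL. A hedged sketch of that behaviour using the WHATWG `URL` API (`escapeBlobPath` is a made-up stand-in for the SDK-internal helper; the real logic is the `escape()` function that follows):

```js
// Hypothetical stand-in for the SDK-internal escapeURLPath().
function escapeBlobPath(url) {
  const parsed = new URL(url);
  parsed.pathname = encodeURIComponent(parsed.pathname || '/')
    .replace(/%2F/g, '/')  // keep path separators
    .replace(/'/g, '%27')
    .replace(/\+/g, '%20')
    .replace(/%25/g, '%'); // revert double-encoded "%"
  return parsed.toString();
}

console.log(escapeBlobPath('http://account.blob.core.windows.net/con/b:'));
// -> http://account.blob.core.windows.net/con/b%3A
console.log(escapeBlobPath('http://account.blob.core.windows.net/con/b%3A'));
// -> http://account.blob.core.windows.net/con/b%3A (already encoded, left alone)
```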
+ * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" } - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 76762: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var universalUserAgent = __nccwpck_require__(45030); -var beforeAfterHook = __nccwpck_require__(83682); -var request = __nccwpck_require__(36234); -var graphql = __nccwpck_require__(88467); -var authToken = __nccwpck_require__(40334); - -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; - - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; - } - - return target; -} - -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; - - var target = _objectWithoutPropertiesLoose(source, excluded); - - var key, i; - - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); - - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; - } - } - - return target; +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.pathname = path; + return urlParsed.toString(); } - -const VERSION = "3.6.0"; - -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. 
If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : undefined; + // mutating searchParams will change the encoding, so we have to do this ourselves + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } + } } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; } -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 59440: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var isPlainObject = __nccwpck_require__(63287); -var universalUserAgent = __nccwpck_require__(45030); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); } - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } + catch (e) { + return undefined; } - }); - return result; } - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } + catch (e) { + return undefined; } - } - - return obj; } - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? 
{ - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; } - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; } - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); } - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +/** + * Rounds a date off to seconds. 
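The query helpers above (`setURLParameter`, `getURLParameter`, `getURLQueries`, `appendToURLQuery`) splice the query string by hand so existing encodings survive untouched. For readers, the intended effect matches the standard `URLSearchParams` operations, as in this hedged sketch:

```js
// Illustration of the intended effect only; the SDK helpers avoid URLSearchParams
// so they never re-encode what the caller already encoded.
const u = new URL('https://account.blob.core.windows.net/container?restype=container');
u.searchParams.set('timeout', '30'); // cf. setURLParameter(url, 'timeout', '30')
u.searchParams.delete('restype');    // cf. setURLParameter(url, 'restype') with no value
console.log(u.toString());           // https://account.blob.core.windows.net/container?timeout=30
console.log(u.searchParams.get('timeout')); // '30' (cf. getURLParameter)
```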
+ * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; } - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); } - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); +/** + * Generate a 64 bytes base64 block ID string. 
+ * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); } - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); } - -function isDefined(value) { - return value !== undefined && value !== null; +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } } - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; +/** + * If two strings are equal when compared case insensitive. + * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.hostname.split(".")[0]; } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.pathname.split("/")[1]; } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; } - } + return accountName; } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); + catch (error) { + throw new Error("Unable to extract accountName with provided information."); } - } - - return result; } - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; +function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); } - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; } - }); + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); } - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. 
- // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; } - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. + * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } } - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? 
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +/** + * Escape the blobName but keep path separator ('/'). + */ +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); +} +/** + * A typesafe helper for ensuring that a given response object has + * the original _response attached. + * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property + */ +function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); } -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. 
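// Illustrative usage sketch, not part of the vendored bundle: the generator
// ExtractPageRangeInfoItems() added above merges the service's pageRange and
// clearRange arrays into one stream ordered by start offset, tagging each item
// with isClear. With a hypothetical segment it yields:
const exampleSegment = {
    pageRange: [{ start: 0, end: 511 }, { start: 1024, end: 1535 }],
    clearRange: [{ start: 512, end: 1023 }],
};
for (const item of ExtractPageRangeInfoItems(exampleSegment)) {
    // -> { start: 0,    end: 511,  isClear: false }
    // -> { start: 512,  end: 1023, isClear: true  }
    // -> { start: 1024, end: 1535, isClear: false }
    console.log(item);
}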
- -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS$1 = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 88467: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(36234); -var universalUserAgent = __nccwpck_require__(45030); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); +const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS$1.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS$1.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. 
+ * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? 
err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. + // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); + } } -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); } - } - } -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; } - } +} - const parsedOptions = typeof query === "string" ? Object.assign({ - query - }, options) : query; - const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/* + * We need to imitate .Net culture-aware sorting, which is used in storage service. + * Below tables contain sort-keys for en-US culture. 
+ */ +const table_lv0 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, + 0x723, 0x725, 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, + 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe02, 0xe09, 0xe0a, + 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, + 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, + 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, + 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, + 0x0, 0x750, 0x0, +]); +const table_lv2 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, + 0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +const table_lv4 = new Uint32Array([ + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +]); +function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; +} +function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; + } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 0x1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 0x1; + if (weight1 === 0x1 && weight2 === 0x1) { + i = 0; + j = 0; + ++curr_level; + } + else if (weight1 === weight2) { + ++i; + ++j; + } + else if (weight1 === 0) { + ++i; + } + else if (weight2 === 0) { + ++j; + } + else { + return weight1 < weight2; + } } + return false; +} - if (!result.variables) { - result.variables = {}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. + * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; } - - result.variables[key] = parsedOptions[key]; - return result; - }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix - // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 - - const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; - - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - - return request(requestOptions).then(response => { - if (response.data.errors) { - const headers = {}; - - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - - throw new GraphqlResponseError(requestOptions, headers, response.data); + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. 
Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; } - - return response.data.data; - }); } -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } } -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. 
+ */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } } -exports.GraphqlResponseError = GraphqlResponseError; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 64193: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "2.21.3"; - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - enumerableOnly && (symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - })), keys.push.apply(keys, symbols); - } - - return keys; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } } -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = null != arguments[i] ? arguments[i] : {}; - i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { - _defineProperty(target, key, source[key]); - }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - - return target; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } } -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +let _defaultHttpClient; +function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); + } + return _defaultHttpClient; +} - return obj; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the StorageBrowserPolicy. + */ +const storageBrowserPolicyName = "storageBrowserPolicy"; +/** + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. + */ +function storageBrowserPolicy() { + return { + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (coreUtil.isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, + }; } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. /** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. - * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + * Name of the {@link storageRetryPolicy} */ -function normalizePaginatedListResponse(response) { - // endpoints can respond with 204 if repository is empty - if (!response.data) { - return _objectSpread2(_objectSpread2({}, response), {}, { - data: [] - }); - } - - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
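// Illustrative usage sketch, not part of the vendored bundle: the Node-only
// StorageSharedKeyCredential added above stores the account key as a Buffer
// decoded from base64 and exposes computeHMACSHA256(), which the shared-key
// policies call to sign the canonicalized string-to-sign. The account name,
// key, and string-to-sign below are hypothetical placeholders.
const sharedKeyCredential = new StorageSharedKeyCredential(
    "devstoreaccount1",
    "c2VjcmV0LWtleQ==" // base64-encoded account key (placeholder)
);
// The policies assemble the string-to-sign from the HTTP method, the standard
// headers, the canonicalized x-ms-* headers, and the canonicalized resource,
// then attach: Authorization: SharedKey <accountName>:<HMAC-SHA256 signature>
const exampleSignature = sharedKeyCredential.computeHMACSHA256(
    "GET\n\n\n\n\n\n\n\n\n\n\n\nx-ms-date:Thu, 01 Jan 2024 00:00:00 GMT\n/devstoreaccount1/container"
);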
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; +const storageRetryPolicyName = "storageRetryPolicy"; +/** + * RetryPolicy types. + */ +var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? 
void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. + // if (response) { + // // Retry select Copy Source Error Codes. + // if (response?.status >= 400) { + // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); + // if (copySourceError !== undefined) { + // switch (copySourceError) { + // case "InternalError": + // case "OperationTimedOut": + // case "ServerBusy": + // return true; + // } + // } + // } + // } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (e) { + if (coreRestPipeline.isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + }, + }; } -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { - done: true - }; - - try { - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; - } catch (error) { - if (error.status !== 409) throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the storageSharedKeyCredentialPolicy. + */ +const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; +/** + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
+ */ +function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = crypto.createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); } - }; } - } - - }) - }; + headersArray.sort((a, b) => { + return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; } -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request) { + if (coreUtil.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } } -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); } +} - let earlyExit = false; - - function done() { - earlyExit = true; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * The programmatic identifier of the storageCorrectContentLengthPolicy. + */ +const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; +/** + * storageCorrectContentLengthPolicy to correctly set Content-Length header with request body length. + */ +function storageCorrectContentLengthPolicy() { + function correctContentLength(request) { + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } } + return { + name: storageCorrectContentLengthPolicyName, + async sendRequest(request, next) { + correctContentLength(request); + return next(request); + }, + }; +} - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
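The storageSharedKeyCredentialPolicy added earlier in this hunk reduces to one signing step: assemble a canonical string-to-sign from the HTTP verb, the standard headers, the canonicalized x-ms-* headers and the canonicalized resource, HMAC-SHA256 it with the base64-decoded account key, and emit `SharedKey <account>:<signature>` as the Authorization header. A minimal standalone sketch of that final step, shown here only for orientation; the accountName, accountKey, and stringToSign values are hypothetical placeholders and the stringToSign layout is illustrative of, not copied from, the policy above:

    const crypto = require("crypto");

    // Hypothetical inputs. In the policy above, stringToSign is built from the verb,
    // eleven standard header values, the canonicalized x-ms-* headers, and the resource.
    const accountName = "myaccount";                              // placeholder
    const accountKey = Buffer.from("<base64 account key>", "base64"); // placeholder; the SDK keeps the key decoded
    const stringToSign =
      "GET\n\n\n\n\n\n\n\n\n\n\n\n" +                             // verb followed by empty standard headers (illustrative)
      "x-ms-date:Tue, 01 Jan 2030 00:00:00 GMT\n" +               // canonicalized x-ms-* headers, one per line
      `/${accountName}/mycontainer/myblob`;                       // canonicalized resource

    // Same call chain as the policy: HMAC-SHA256 over UTF-8 bytes, base64-encoded result.
    const signature = crypto.createHmac("sha256", accountKey)
      .update(stringToSign, "utf8")
      .digest("base64");

    const authorization = `SharedKey ${accountName}:${signature}`;
    // The policy sets this value on the request's Authorization header before calling next(request).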
+/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; } - - return gather(octokit, results, iterator, mapFn); - }); + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); } - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. 
+ * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } } - /** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; +function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential(); + } + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; } -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 83044: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + afterRetry: hasInjector, + }; + } } - - keys.push.apply(keys, symbols); - } - - return keys; + return undefined; } - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? 
arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); +function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML: coreXml.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); + corePipeline.addPolicy(storageCorrectContentLengthPolicy()); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (coreAuth.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; + } + } + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; } - } - - return target; + return factory.constructor.name === "StorageSharedKeyCredential"; } - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; + } + return factory.constructor.name === "AnonymousCredential"; +} +function isCoreHttpBearerTokenFactory(factory) { + return coreAuth.isTokenCredential(factory.credential); +} +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageBrowserPolicyFactory"; +} +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageRetryPolicyFactory"; +} +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; +} +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, + }; + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); }); - } else { - obj[key] = value; - } - - return obj; } -const Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: ["POST 
/orgs/{org}/actions/runners/{runner_id}/labels"], - addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], - deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], - disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], - enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], - getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], - getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], - getArtifact: ["GET 
/repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], - getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], - getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], - getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], - getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], - getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { - renamed: ["actions", "getGithubActionsPermissionsRepository"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], - listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], - listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], - removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], - setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], - setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], - setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], - setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], - setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], - setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], - setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], - setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], - setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], - setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET 
/repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { - renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] - }], - addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging", + }, + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule", + }, + }, + }, + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String", + }, + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + }, + }, + }, + }, +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String", + }, + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean", + }, + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean", + }, + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, +}; +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + days: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number", + }, + }, + }, + }, +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, +}; +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String", + }, + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: 
"AllowedMethods", + type: { + name: "String", + }, + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String", + }, + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String", + }, + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number", + }, + }, + }, + }, +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String", + }, + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String", + }, + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String", + }, + }, + }, + }, +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String", + }, + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String", + }, + }, + authenticationErrorDetail: { + serializedName: "AuthenticationErrorDetail", + xmlName: "AuthenticationErrorDetail", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + }, + }, + }, + }, +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"], + }, + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem", + }, + }, + }, + }, 
+ continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean", + }, + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + }, + }, +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String", + }, + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String", + }, + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean", + }, + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123", + }, + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String", + }, + }, + }, + }, +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: 
"UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String", + }, + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String", + }, + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String", + }, + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String", + }, + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String", + }, + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, + }, + }, +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String", + }, + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String", + }, + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + }, + }, + }, + }, +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag", + }, + }, + }, + }, + }, + }, +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, + }, + }, +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String", + }, + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: 
"Composite", + className: "AccessPolicy", + }, + }, + }, + }, +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String", + }, + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String", + }, + }, + }, + }, +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, +}; +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName", + }, + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean", + }, + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean", + }, + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + }, + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: 
"HasVersionsOnly", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean", + }, + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123", + }, + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String", + }, + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String", + }, + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray", + }, + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String", + }, + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String", + }, + }, + serverEncrypted: { + serializedName: 
"ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean", + }, + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean", + }, + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String", + }, + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123", + }, + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number", + }, + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean", + }, + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold", + ], + }, + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String", + }, + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123", + }, + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number", + }, + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + 
name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String", + }, + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix", + }, + }, + }, + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName", + }, + }, + }, + }, +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, + }, + }, + }, +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, + }, + }, + }, +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number", + }, + }, + }, + }, +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: 
"PageRange", + }, + }, + }, + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number", + }, + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number", + }, + }, + }, + }, +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number", + }, + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number", + }, + }, + }, + }, +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String", + }, + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String", + }, + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization", + }, + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization", + }, + }, + }, + }, +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat", + }, + }, + }, + }, +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"], + }, + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + }, + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + }, + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + }, + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "Dictionary", + value: { type: { name: "any" } }, + }, + }, + }, + }, +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: 
{ + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String", + }, + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String", + }, + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String", + }, + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String", + }, + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String", + }, + }, + }, + }, +}; +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField", + }, + }, + }, + }, + }, + }, +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String", + }, + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String", + }, + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number", + }, + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number", + }, + }, + }, + }, +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: 
"String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const 
ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: 
"String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + 
lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + 
name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + 
serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String", + }, + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + 
xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + 
legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + }, + }, +}; +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String", + }, + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean", + }, + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String", + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + 
contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String", + }, + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean", + }, + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String", + }, + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean", + }, + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number", + }, + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: 
"x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + 
className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: 
"x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + }, + }, +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + 
}, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, 
+ }, + }, +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + 
serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + 
type: { + name: "String", + }, + }, + }, + }, +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + 
type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: 
{ + name: "String", + }, + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + }, + }, +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + 
}, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + 
}, + }, + }, +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUploadPagesFromURLExceptionHeaders 
= { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + 
serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + 
serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + 
serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: 
"x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + }, + }, +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: 
"String", + }, + }, + }, + }, +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: 
"x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + 
serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + AccessPolicy: AccessPolicy, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + ArrowConfiguration: 
ArrowConfiguration, + ArrowField: ArrowField, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobFlatListSegment: BlobFlatListSegment, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPrefix: BlobPrefix, + BlobPropertiesInternal: BlobPropertiesInternal, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobServiceProperties: BlobServiceProperties, + BlobServiceStatistics: BlobServiceStatistics, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobTag: BlobTag, + BlobTags: BlobTags, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + Block: Block, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: 
BlockBlobGetBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockList: BlockList, + BlockLookupList: BlockLookupList, + ClearRange: ClearRange, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerItem: ContainerItem, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerProperties: ContainerProperties, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + CorsRule: CorsRule, + DelimitedTextConfiguration: DelimitedTextConfiguration, + FilterBlobItem: FilterBlobItem, + FilterBlobSegment: FilterBlobSegment, + GeoReplication: 
GeoReplication, + JsonTextConfiguration: JsonTextConfiguration, + KeyInfo: KeyInfo, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + ListContainersSegmentResponse: ListContainersSegmentResponse, + Logging: Logging, + Metrics: Metrics, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageList: PageList, + PageRange: PageRange, + QueryFormat: QueryFormat, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + RetentionPolicy: RetentionPolicy, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + SignedIdentifier: SignedIdentifier, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + UserDelegationKey: UserDelegationKey +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
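A brief orientation on how the generated objects in this bundle fit together (a simplified sketch, not code from this patch): each headers mapper above describes how raw x-ms-* response headers are deserialized into typed result properties, the parameter constants that follow describe how option values are serialized into query and header parameters, and each operation spec pairs both with a path, an HTTP method, and the XML serializer created below via coreClient__namespace.createSerializer(Mappers, /* isXml */ true) before being handed to client.sendOperationRequest. The helper below is a hypothetical illustration of the header-deserialization step only; the names deserializeHeaders and rawHeaders are invented for the sketch.

    // Hypothetical sketch: turn raw HTTP response headers into a typed object
    // using a generated headers mapper's modelProperties (simplified; the real
    // work is done by the serializer created from the frozen Mappers map below).
    function deserializeHeaders(mapper, rawHeaders) {
      const result = {};
      for (const [propName, prop] of Object.entries(mapper.type.modelProperties)) {
        const raw = rawHeaders[prop.serializedName.toLowerCase()];
        if (raw === undefined) continue;
        switch (prop.type.name) {
          case "Number":
            result[propName] = Number(raw);
            break;
          case "Boolean":
            result[propName] = raw === "true";
            break;
          case "DateTimeRfc1123":
            result[propName] = new Date(raw);
            break;
          default:
            // String, ByteArray, Enum, etc. are left untouched in this sketch.
            result[propName] = raw;
        }
      }
      return result;
    }

    // Example (header values are made up):
    // deserializeHeaders(AppendBlobAppendBlockHeaders, {
    //   "x-ms-blob-append-offset": "1024",
    //   "x-ms-blob-committed-block-count": "3",
    //   "date": "Tue, 01 Oct 2024 12:00:00 GMT",
    // })
    // -> { blobAppendOffset: "1024", blobCommittedBlockCount: 3, date: new Date(...) }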
+ */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties, +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String", + }, + }, + skipEncoding: true, +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number", + }, + }, +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2025-01-05", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String", + }, + }, +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String", + }, + }, +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String", + }, + }, +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number", + }, + }, +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"], + }, + }, + }, + }, + collectionFormat: "CSV", +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo, +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: 
"body", + required: true, + xmlName: "body", + type: { + name: "Stream", + }, + }, +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number", + }, + }, +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String", + }, + }, +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String", + }, + }, +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope", + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride", + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: 
"SignedIdentifier", + }, + }, + }, + }, +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String", + }, + }, +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String", + }, + }, +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String", + }, + }, +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String", + }, + }, +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number", + }, + }, +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String", + }, + }, +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, + }, +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number", + }, + }, +}; +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", + }, + }, +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String", + }, + }, +}; +const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: 
"ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions", + ], + }, + }, + }, + }, + collectionFormat: "CSV", +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String", + }, + }, +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String", + }, + }, +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String", + }, + }, +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String", + }, + }, +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean", + }, + }, +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean", + }, + }, +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String", + }, + }, +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String", + }, + }, +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String", + }, + }, +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String", + }, + }, +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String", + }, + }, +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"], + }, + }, +}; +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String", + }, + }, +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: 
"String", + }, + }, +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String", + }, + }, +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String", + }, + }, +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String", + }, + }, +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String", + }, + }, +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String", + }, + }, +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String", + }, + }, +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, +}; +const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + 
mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince", + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince", + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String", + }, + }, +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch", + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String", + }, + }, +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String", + }, + }, +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String", + }, + }, +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean", + }, + }, +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, + }, +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String", + }, + }, +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray", + }, + }, +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String", + }, + }, +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"], + }, + }, +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String", + }, + }, +}; +const 
copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String", + }, + }, +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest, +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags, +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray", + }, + }, +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String", + }, + }, +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream", + }, + }, +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", + }, + }, +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number", + }, + }, +}; +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", 
+ "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number", + }, + }, +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number", + }, + }, +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", + }, + }, +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String", + }, + }, +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray", + }, + }, +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String", + }, + }, +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String", + }, + }, +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String", + }, + }, +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"], + }, + }, +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number", + }, + }, +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition", + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number", + }, + }, +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: 
"String", + }, + }, +}; +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean", + }, + }, +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String", + }, + }, +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList, +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"], + }, + }, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Service operations. */ +class ServiceImpl { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. 
+ * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders, + }, + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders, + }, + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, 
+ maxPageSize, + include, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, + }, + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ServiceSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Container operations. */ +class ContainerImpl { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. 
+ */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" }, + }, + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + }, + headersMapper: ContainerGetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders, + }, + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + 
contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ContainerSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2, + ], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: 
ContainerRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing Blob operations. */ +class BlobImpl { + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. 
+ */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. 
The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetHttpHeadersExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + 
urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp12, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp12, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp13, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + 
ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + 
urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders, + }, + 202: { + headersMapper: BlobSetTierHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [ + comp, + timeoutInSeconds, + restype1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders, + }, + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3, +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const 
setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTagsExceptionHeaders, + }, + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing PageBlob operations. */ +class PageBlobImpl { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. 
+ * @param options The options parameters. + */ + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$2, +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + 
sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource, + ], + isXML: true, + serializer: xmlSerializer$2, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing AppendBlob operations. */ +class AppendBlobImpl { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
+ */ + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$1, +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition, + ], + isXML: true, + serializer: xmlSerializer$1, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing BlockBlob operations. */ +class BlockBlobImpl { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. 
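+     *
+     * Editor's note: an illustrative sketch of block staging with the public BlockBlobClient, not part
+     * of the generated bundle. `containerClient` and the Buffer `chunk` are assumed to exist:
+     *
+     *     const blockBlob = containerClient.getBlockBlobClient("data.bin");
+     *     // Block ids must be base64-encoded and all of equal length for a given blob.
+     *     const blockId = Buffer.from("block-000001").toString("base64");
+     *     await blockBlob.stageBlock(blockId, chunk, chunk.length);
+     *     await blockBlob.commitBlockList([blockId]);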
+ */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. + */ + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + blobType2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + 
copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties, + ], + isXML: true, + serializer: xmlSerializer, +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer, +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders, + }, + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer, +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + var _a, _b; + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.26.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" }); + super(optionsWithDefaults); + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2025-01-05"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); + } +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * @internal + */ +class StorageContextClient extends StorageClient$1 { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = getCredentialFromPipeline(pipeline); + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Creates a span using the global tracer. + * @internal + */ +const tracingClient = coreTracing.createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, + namespace: "Microsoft.Storage", +}); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
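+     *
+     * Editor's note: a small illustrative sketch, not part of the generated bundle. Both routes emit
+     * the permission characters in the service-accepted order:
+     *
+     *     BlobSASPermissions.parse("racwd").toString();                    // "racwd"
+     *     BlobSASPermissions.from({ write: true, read: true }).toString(); // "rw"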
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. 
+ */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
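+     *
+     * Editor's note: an illustrative sketch, not part of the generated bundle. Characters are re-emitted
+     * in the canonical order regardless of how they were supplied:
+     *
+     *     ContainerSASPermissions.parse("lrw").toString(); // "rwl"
+     *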
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. 
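+     * When set, it is rendered into the "sip" query parameter via the ipRangeToString helper defined
+     * above, e.g. `{ start: "10.0.0.1", end: "10.0.0.255" }` becomes `10.0.0.1-10.0.0.255` and
+     * `{ start: "168.1.5.60" }` becomes `168.1.5.60` (editor's note).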
+ * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
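+     *
+     * Editor's note: an illustrative end-to-end sketch, not part of the generated bundle. The
+     * `sharedKeyCredential` (a StorageSharedKeyCredential) and `blobClient` are assumed to exist:
+     *
+     *     const sas = generateBlobSASQueryParameters({
+     *         containerName: "logs",
+     *         blobName: "app.log",
+     *         permissions: BlobSASPermissions.parse("r"),
+     *         expiresOn: new Date(Date.now() + 60 * 60 * 1000),
+     *     }, sharedKeyCredential);
+     *     const sasUrl = `${blobClient.url}?${sas.toString()}`;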
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; +} +function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign: stringToSign, + }; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. 
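+    // Editor's note: the 2020-12-06 user-delegation string-to-sign below is, in order: permissions,
+    // start, expiry, canonical resource path, the signed-key fields (skoid, sktid, skt, ske, sks, skv),
+    // saoid, the unused agent object id, scid, IP range, protocol, version, resource, snapshot/version
+    // timestamp, encryption scope, and the five response-header overrides, joined with "\n".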
+ const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. 
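+ *
+ * Example (illustrative sketch, not taken from the original source; assumes an existing
+ * ContainerClient or BlobClient instance named `client`):
+ *
+ *   const leaseClient = client.getBlobLeaseClient();
+ *   await leaseClient.acquireLease(30); // 15-60 seconds, or -1 for an infinite lease
+ *   // ...operations guarded by the lease...
+ *   await leaseClient.releaseLease();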
+ */ + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; + } + else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; + } + if (!leaseId) { + leaseId = coreUtil.randomUUID(); + } + this._leaseId = leaseId; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + this._leaseId = proposedLeaseId; + return response; + }); + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }); + }); + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. 
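+ *
+ * If the source stream ends (or errors) before the requested range (`offset` through
+ * `offset + count - 1`) has been fully delivered, a new source is requested via
+ * `getter(offset)` and reading resumes, up to `maxRetryRequests` times
+ * (see sourceErrorOrEndHandler below).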
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceAbortedHandler = () => { + const abortError = new abortController.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. 
+ * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. 
+ * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. 
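+ * Possible values include: 'Mutable', 'Unlocked', 'Locked'.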
+ * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
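+ // Zig-zag decoding carried out in float arithmetic: even values map to n / 2 and odd
+ // values to -(n + 1) / 2 (e.g. 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2).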
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
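+ // Each map entry is a string key followed by a value decoded with readItemMethod; the
+ // surrounding blocks (entry counts and optional byte sizes) are handled by readArray
+ // via readMap below.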
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (_a) { + // no-op + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case 
AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
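+// AvroReader consumes the Avro object container format: the 4-byte magic "Obj\x01"
+// (AVRO_INIT_BYTES), a metadata map carrying avro.schema and avro.codec, a randomly
+// generated 16-byte sync marker, and then data blocks of
+// <item count, byte size, items..., sync marker>.
+//
+// Illustrative usage sketch (not part of the original source; assumes a Node.js Readable
+// named `nodeStream` carrying Avro container data):
+//
+//   const reader = new AvroReader(new AvroReadableFromStream(nodeStream));
+//   for await (const record of reader.parseObjects()) {
+//     console.log(record.$schema, record);
+//   }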
+class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects() { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (_a) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
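+// AvroReadable is the abstract byte source consumed by AvroReader: concrete implementations
+// expose a `position` getter and `read(size, options)` returning a Uint8Array, as
+// AvroReadableFromStream does below for Node.js Readable streams.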
+class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
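+ *
+ * The Avro stream produced by the blob query service is expected to contain records with the
+ * schemas com.microsoft.azure.storage.queryBlobContents.resultData, .progress, .end and
+ * .error; resultData chunks are pushed downstream, progress/end drive onProgress, and error
+ * records are forwarded to onError (see readInternal below).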
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. 
+ * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed. + */ + BlockBlobTier["Cold"] = "Cold"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. 
+ */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +/** + * + * To get OAuth audience for a storage account for blob service. + */ +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreUtil.delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
+ * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. 
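+// A quick illustration of the rangeToString helper above (a minimal sketch): rangeToString({ offset: 0, count: 512 })
+// produces "bytes=0-511", and rangeToString({ offset: 255 }) produces "bytes=255-", i.e. count is
+// converted into an inclusive end position.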
+// Licensed under the MIT License.
+// In browser, during webpack or browserify bundling, this module will be replaced by 'events'
+// https://github.com/Gozala/events
+/**
+ * States for Batch.
+ */
+var BatchStates;
+(function (BatchStates) {
+ BatchStates[BatchStates["Good"] = 0] = "Good";
+ BatchStates[BatchStates["Error"] = 1] = "Error";
+})(BatchStates || (BatchStates = {}));
+/**
+ * Batch provides basic parallel execution with concurrency limits.
+ * It stops executing the remaining operations when one of the executed operations throws an error.
+ * Batch cannot cancel operations that are already running; you need to cancel them yourself.
+ */
+class Batch {
+ /**
+ * Creates an instance of Batch.
+ * @param concurrency -
+ */
+ constructor(concurrency = 5) {
+ /**
+ * Number of active operations under execution.
+ */
+ this.actives = 0;
+ /**
+ * Number of operations that have completed execution.
+ */
+ this.completed = 0;
+ /**
+ * Offset of the next operation to be executed.
+ */
+ this.offset = 0;
+ /**
+ * Array of operations to be executed.
+ */
+ this.operations = [];
+ /**
+ * State of the Batch. When an error happens, the state turns into Error
+ * and the Batch stops executing the remaining operations.
+ */
+ this.state = BatchStates.Good;
+ if (concurrency < 1) {
+ throw new RangeError("concurrency must be larger than 0");
+ }
+ this.concurrency = concurrency;
+ this.emitter = new events.EventEmitter();
+ }
+ /**
+ * Add an operation to the queue.
+ *
+ * @param operation -
+ */
+ addOperation(operation) {
+ this.operations.push(async () => {
+ try {
+ this.actives++;
+ await operation();
+ this.actives--;
+ this.completed++;
+ this.parallelExecute();
+ }
+ catch (error) {
+ this.emitter.emit("error", error);
+ }
+ });
+ }
+ /**
+ * Start executing the operations in the queue.
+ *
+ */
+ async do() {
+ if (this.operations.length === 0) {
+ return Promise.resolve();
+ }
+ this.parallelExecute();
+ return new Promise((resolve, reject) => {
+ this.emitter.on("finish", resolve);
+ this.emitter.on("error", (error) => {
+ this.state = BatchStates.Error;
+ reject(error);
+ });
+ });
+ }
+ /**
+ * Get the next operation to be executed. Returns null when reaching the end.
+ *
+ */
+ nextOperation() {
+ if (this.offset < this.operations.length) {
+ return this.operations[this.offset++];
+ }
+ return null;
+ }
+ /**
+ * Start executing operations. One of the most important differences between
+ * this method and do() is that do() wraps the execution as an async method.
+ *
+ */
+ parallelExecute() {
+ if (this.state === BatchStates.Error) {
+ return;
+ }
+ if (this.completed >= this.operations.length) {
+ this.emitter.emit("finish");
+ return;
+ }
+ while (this.actives < this.concurrency) {
+ const operation = this.nextOperation();
+ if (operation) {
+ operation();
+ }
+ else {
+ return;
+ }
+ }
+ }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This class generates a readable stream from the data in an array of buffers.
+ */
+class BuffersStream extends stream.Readable {
+ /**
+ * Creates an instance of BuffersStream that will emit the data
+ * contained in the array of buffers.
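+ *
+ * A minimal sketch (illustrative only):
+ *
+ * ```js
+ * const chunks = [Buffer.from("hello "), Buffer.from("world")];
+ * const bufStream = new BuffersStream(chunks, 11);
+ * bufStream.pipe(process.stdout); // prints "hello world"
+ * ```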
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const maxBufferLength = buffer.constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength;
+ if (len === 0) {
+ len = maxBufferLength;
+ }
+ this.buffers.push(Buffer.allocUnsafe(len));
+ }
+ if (buffers) {
+ this.fill(buffers, totalLength);
+ }
+ }
+ /**
+ * Fill the internal buffers with data in the input buffers serially
+ * with respect to the total length and the total capacity of the internal buffers.
+ * Data that has been copied will be shifted out of the input buffers.
+ *
+ * @param buffers - Input buffers containing the data to be filled in the pooled buffer
+ * @param totalLength - Total length of the data to be filled in.
+ *
+ */
+ fill(buffers, totalLength) {
+ this._size = Math.min(this.capacity, totalLength);
+ let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
+ while (totalCopiedNum < this._size) {
+ const source = buffers[i];
+ const target = this.buffers[j];
+ const copiedNum = source.copy(target, targetOffset, sourceOffset);
+ totalCopiedNum += copiedNum;
+ sourceOffset += copiedNum;
+ targetOffset += copiedNum;
+ if (sourceOffset === source.length) {
+ i++;
+ sourceOffset = 0;
+ }
+ if (targetOffset === target.length) {
+ j++;
+ targetOffset = 0;
+ }
+ }
+ // clear copied from source buffers
+ buffers.splice(0, i);
+ if (buffers.length > 0) {
+ buffers[0] = buffers[0].slice(sourceOffset);
+ }
+ }
+ /**
+ * Get the readable stream assembled from all the data in the internal buffers.
+ *
+ */
+ getReadableStream() {
+ return new BuffersStream(this.buffers, this.size);
+ }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This class accepts a Node.js Readable stream as input, and keeps reading data
+ * from the stream into the internal buffer structure, until it reaches maxBuffers.
+ * Every available buffer will try to trigger outgoingHandler.
+ *
+ * The internal buffer structure includes an incoming buffer array and an outgoing
+ * buffer array. The incoming buffer array holds the "empty" buffers that can be filled
+ * with new incoming data. The outgoing array holds the filled buffers to be
+ * handled by outgoingHandler. The size of every buffer above is defined by the bufferSize parameter.
+ *
+ * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
+ *
+ * NUM_OF_ALL_BUFFERS is less than or equal to maxBuffers
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * 1. Set the input stream's highWaterMark to the same value as the bufferSize
+ * parameter, which avoids Buffer.concat() operations.
+ * 2. Set concurrency to a smaller value than maxBuffers, which helps reduce the
+ * chance that an outgoing handler has to wait for stream data
+ * (a situation in which outgoing handlers are blocked).
+ * The outgoing queue shouldn't be empty.
+ */
+class BufferScheduler {
+ /**
+ * Creates an instance of BufferScheduler.
+ *
+ * @param readable - A Node.js Readable stream
+ * @param bufferSize - Buffer size of every maintained buffer
+ * @param maxBuffers - How many buffers can be allocated
+ * @param outgoingHandler - An async function scheduled to be
+ * triggered when a buffer is fully filled
+ * with stream data
+ * @param concurrency - Concurrency of executing outgoingHandlers (>0)
+ * @param encoding - [Optional] Encoding of Readable stream when it's a string stream
+ */
+ constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
+ /**
+ * An internal event emitter.
+ */
+ this.emitter = new events.EventEmitter();
+ /**
+ * An internal offset marker to track data offset in bytes of next outgoingHandler.
+ */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. 
+ * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. 
+ * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreUtil.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? 
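+ // Fall back to the library default when the caller did not set maxRetryRequests
+ // (or passed a negative value), so the retriable stream created below can re-issue
+ // ranged downloads if the connection drops mid-stream.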
+ options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + }); + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + throw e; + } + }); + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. 
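+ *
+ * Example usage (a minimal sketch; `blobClient` is assumed to be an existing BlobClient instance):
+ *
+ * ```js
+ * const properties = await blobClient.getProperties();
+ * console.log(`Content length: ${properties.contentLength}`);
+ * console.log(properties.metadata); // note: keys are returned in lowercase
+ * ```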
+ */ + async getProperties(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + }); + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.delete(updatedOptions)); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
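+ *
+ * Example (a minimal sketch; assumes soft delete is enabled on the account and the blob was soft deleted):
+ *
+ * ```js
+ * await blobClient.undelete();
+ * ```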
+ */
+ async undelete(options = {}) {
+ return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => {
+ return assertResponse(await this.blobContext.undelete({
+ abortSignal: options.abortSignal,
+ tracingOptions: updatedOptions.tracingOptions,
+ }));
+ });
+ }
+ /**
+ * Sets system properties on the blob.
+ *
+ * If no value is provided, or no value is provided for the specified blob HTTP headers,
+ * these blob HTTP headers without a value will be cleared.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ *
+ * @param blobHTTPHeaders - If no value is provided, or no value is provided for
+ * the specified blob HTTP headers, these blob HTTP
+ * headers without a value will be cleared.
+ * A common header to set is `blobContentType`,
+ * enabling the browser to provide functionality
+ * based on file type.
+ * @param options - Optional options to Blob Set HTTP Headers operation.
+ */
+ async setHTTPHeaders(blobHTTPHeaders, options = {}) {
+ options.conditions = options.conditions || {};
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => {
+ var _a;
+ return assertResponse(await this.blobContext.setHttpHeaders({
+ abortSignal: options.abortSignal,
+ blobHttpHeaders: blobHTTPHeaders,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.
+ tracingOptions: updatedOptions.tracingOptions,
+ }));
+ });
+ }
+ /**
+ * Sets user-defined metadata for the specified blob as one or more name-value pairs.
+ *
+ * If no option is provided, or no metadata is defined in the parameter, the blob
+ * metadata will be removed.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata
+ *
+ * @param metadata - Replace existing metadata with this value.
+ * If no value is provided, the existing metadata will be removed.
+ * @param options - Optional options to Set Metadata operation.
+ */
+ async setMetadata(metadata, options = {}) {
+ options.conditions = options.conditions || {};
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => {
+ var _a;
+ return assertResponse(await this.blobContext.setMetadata({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ metadata,
+ modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: updatedOptions.tracingOptions,
+ }));
+ });
+ }
+ /**
+ * Sets tags on the underlying blob.
+ * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9),
+ * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
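+ *
+ * Example (a minimal sketch with illustrative tag values):
+ *
+ * ```js
+ * await blobClient.setTags({ project: "docs", status: "in-review" });
+ * const result = await blobClient.getTags();
+ * console.log(result.tags); // { project: "docs", status: "in-review" }
+ * ```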
+ * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + tags: toBlobTags(tags), + })); + }); + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. 
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + async abortCopyFromURL(copyId, options = {}) { + return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. 
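+ *
+ * Example (a minimal sketch; `sourceUrl` must be readable by the service, e.g. a public blob or a URL carrying a SAS token):
+ *
+ * ```js
+ * const result = await blobClient.syncCopyFromURL(sourceUrl);
+ * console.log(result.copyStatus); // "success" once the synchronous copy has completed
+ * ```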
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f, _g; + return assertResponse(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, + immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. + */ + async setAccessTier(tier, options = {}) { + return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + var _a; + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? 
param3 : 0;
+ }
+ else {
+ offset = typeof param1 === "number" ? param1 : 0;
+ count = typeof param2 === "number" ? param2 : 0;
+ options = param3 || {};
+ }
+ let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0;
+ if (blockSize < 0) {
+ throw new RangeError("blockSize option must be >= 0");
+ }
+ if (blockSize === 0) {
+ blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+ }
+ if (offset < 0) {
+ throw new RangeError("offset option must be >= 0");
+ }
+ if (count && count <= 0) {
+ throw new RangeError("count option must be greater than 0");
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => {
+ // The customer didn't specify the length, so get it from the blob properties
+ if (!count) {
+ const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }));
+ count = response.contentLength - offset;
+ if (count < 0) {
+ throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`);
+ }
+ }
+ // Allocate the buffer of size = count if the buffer is not provided
+ if (!buffer) {
+ try {
+ buffer = Buffer.alloc(count);
+ }
+ catch (error) {
+ throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`);
+ }
+ }
+ if (buffer.length < count) {
+ throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`);
+ }
+ let transferProgress = 0;
+ const batch = new Batch(options.concurrency);
+ for (let off = offset; off < offset + count; off = off + blockSize) {
+ batch.addOperation(async () => {
+ // Exclusive chunk end position
+ let chunkEnd = offset + count;
+ if (off + blockSize < chunkEnd) {
+ chunkEnd = off + blockSize;
+ }
+ const response = await this.download(off, chunkEnd - off, {
+ abortSignal: options.abortSignal,
+ conditions: options.conditions,
+ maxRetryRequests: options.maxRetryRequestsPerBlock,
+ customerProvidedKey: options.customerProvidedKey,
+ tracingOptions: updatedOptions.tracingOptions,
+ });
+ const stream = response.readableStreamBody;
+ await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset);
+ // Update progress after the block is downloaded, in case of a block retry.
+ // Could provide finer grained progress updating inside HTTP requests,
+ // only if the convenience layer download retry is enabled
+ transferProgress += chunkEnd - off;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress });
+ }
+ });
+ }
+ await batch.do();
+ return buffer;
+ });
+ }
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob to a local file.
+ * Fails if the given file path already exists.
+ * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param filePath -
+ * @param offset - From which position of the block blob to download.
+ * @param count - How much data to be downloaded. Will download to the end when passing undefined.
+ * @param options - Optional options to the Blob download operation.
+ * @returns The response data for the blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path.
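+ *
+ * Example usage (Node.js only; a minimal sketch with an illustrative local path):
+ *
+ * ```js
+ * await blobClient.downloadToFile("/tmp/downloaded-blob.bin");
+ * ```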
+ */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. + response.blobDownloadStream = undefined; + return response; + }); + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. 
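The `downloadToFile` variant documented above streams straight to disk instead; a short sketch (Node.js only, placeholder path, reusing the `blobClient` from the previous sketch), noting that the returned properties carry no readable stream because the body has already been written to the file.

```js
// Assumes `blobClient` was created as in the previous sketch.
const response = await blobClient.downloadToFile("/tmp/big-file.bin", 0, undefined, {
  onProgress: (p) => console.log(`wrote ${p.loadedBytes} bytes`),
});
// readableStreamBody is undefined here; metadata and properties are still populated.
console.log(response.contentType, response.contentLength);
```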
+ * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async startCopyFromURL(copySource, options = {}) { + return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return assertResponse(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
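As the guard above shows, `generateSasUrl` only works when the client was built with a `StorageSharedKeyCredential`. A hedged sketch of that flow (account name, key variable, and blob URL are placeholders):

```js
const {
  BlobClient,
  BlobSASPermissions,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", process.env.ACCOUNT_KEY);
const blobClient = new BlobClient(
  "https://myaccount.blob.core.windows.net/my-container/report.csv",
  credential
);

// Read-only SAS URL valid for one hour.
blobClient
  .generateSasUrl({
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  })
  .then((sasUrl) => console.log(sasUrl));
```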
+ */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; + } + /** + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve) => { + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options = {}) { + return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Set immutability policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. + */ + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. 
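A small sketch of the immutability helpers defined above; it assumes a blob in a container with immutable storage enabled (a service-side prerequisite), reuses the `expiriesOn`/`policyMode` field names exactly as they appear in the code, and uses the same placeholder `blobClient` as before.

```js
// Assumes `blobClient` points at a blob in a container with immutable storage enabled.
await blobClient.setImmutabilityPolicy({
  expiriesOn: new Date(Date.now() + 24 * 60 * 60 * 1000), // field name as used above
  policyMode: "Unlocked",
});
await blobClient.setLegalHold(true);

// Unlocked policies can later be removed; legal holds are cleared separately.
await blobClient.deleteImmutabilityPolicy();
await blobClient.setLegalHold(false);
```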
+ */ + async setLegalHold(legalHoldEnabled, options = {}) { + return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = this.storageClientContext.appendBlob; + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + const conditions = { ifNoneMatch: ETagAny }; + return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. 
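The constructor above accepts either `(url, pipeline)`, `(url, credential, options)`, or `(connectionString, containerName, blobName)`. Two equivalent ways to obtain a `BlockBlobClient`, sketched with placeholder names:

```js
const { BlobServiceClient, BlockBlobClient } = require("@azure/storage-blob");

// 1. Connection-string overload, as parsed by extractConnectionStringParts above.
const blockBlobClient = new BlockBlobClient(
  process.env.AZURE_STORAGE_CONNECTION_STRING,
  "my-container",
  "notes.txt"
);

// 2. Via the service/container clients, which share one pipeline.
const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
const sameBlob = service.getContainerClient("my-container").getBlockBlobClient("notes.txt");
```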
+ */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + if (!coreUtil.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + }); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. 
+ * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? 
void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
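Putting `stageBlock` and `commitBlockList` together: blocks are staged under base64-encoded block IDs of equal length and only become part of the blob once the ID list is committed. A minimal sketch with placeholder data, reusing the `blockBlobClient` from the earlier construction sketch:

```js
// Assumes `blockBlobClient` from the earlier construction sketch.
const chunks = ["hello ", "block ", "blob"];

// Equal-length, base64-encoded block IDs.
const blockId = (i) => Buffer.from(String(i).padStart(6, "0")).toString("base64");

const ids = [];
for (let i = 0; i < chunks.length; i++) {
  const id = blockId(i);
  await blockBlobClient.stageBlock(id, chunks[i], Buffer.byteLength(chunks[i]));
  ids.push(id);
}

// Nothing is visible on the blob until the block list is committed.
await blockBlobClient.commitBlockList(ids);
const { committedBlocks } = await blockBlobClient.getBlockList("committed");
console.log(`${committedBlocks.length} committed blocks`);
```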
+ */ + async getBlockList(listType, options = {}) { + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + }); + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (coreUtil.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + }); + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + async uploadBrowserData(browserData, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); + } + /** + * + * Uploads data to block blob. 
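For most callers the high-level `uploadData` path above is enough: it chooses between a single-shot upload and the stageBlock/commitBlockList flow based on `maxSingleShotSize`. A hedged sketch with a Node.js Buffer and the same placeholder client; all option names are taken from the code shown here.

```js
// Assumes `blockBlobClient` as before.
const data = Buffer.from(JSON.stringify({ hello: "world" }));

await blockBlobClient.uploadData(data, {
  maxSingleShotSize: 8 * 1024 * 1024, // anything larger goes through stageBlock/commitBlockList
  blockSize: 4 * 1024 * 1024,
  concurrency: 4,
  blobHTTPHeaders: { blobContentType: "application/json" },
  onProgress: (p) => console.log(`uploaded ${p.loadedBytes} bytes`),
});
```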
Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + var _a, _b; + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); + } + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + }); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
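The block-size selection in `uploadSeekableInternal` above boils down to: use a single PUT when the payload fits within `maxSingleShotSize`, otherwise split it into at most the maximum block count, never going below the SDK's minimum chunk size. A standalone restatement of that arithmetic; the default values for the two limits are assumptions, since the actual constants are defined elsewhere in this bundle.

```js
// Mirrors the branch in uploadSeekableInternal; maxBlocks/minBlockSize defaults are assumptions.
function pickBlockSize(size, maxSingleShotSize, maxBlocks = 50000, minBlockSize = 4 * 1024 * 1024) {
  if (size <= maxSingleShotSize) {
    return 0; // handled by a single upload() call, no blocks needed
  }
  // Smallest block size that keeps the block count within the limit...
  const blockSize = Math.ceil(size / maxBlocks);
  // ...but never below the minimum chunk size used by the SDK.
  return Math.max(blockSize, minBlockSize);
}
```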
+ * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await fsStat(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. 
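The two Node.js-only helpers above in practice, with placeholder paths; note that for `uploadStream` the `bufferSize` argument is both the allocation size per buffer and the staged block size, and matching the stream's `highWaterMark` to it avoids extra `Buffer.concat()` work, as the comment above suggests.

```js
const fs = require("fs");

// Assumes `blockBlobClient` as before.
await blockBlobClient.uploadFile("./artifact.tgz", {
  blockSize: 8 * 1024 * 1024,
  concurrency: 4,
});

// Stream upload: bufferSize doubles as the staged block size.
await blockBlobClient.uploadStream(
  fs.createReadStream("./artifact.tgz", { highWaterMark: 8 * 1024 * 1024 }),
  8 * 1024 * 1024, // bufferSize
  5,               // maxConcurrency
  { onProgress: (p) => console.log(`streamed ${p.loadedBytes} bytes`) }
);
```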
+ */ +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = this.storageClientContext.pageBlob; + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. 
+ * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.pageBlobContext.create(0, size, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. 
+ * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Frees the specified pages from the page blob. 
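+ *
+ * A minimal sketch (assumes `pageBlobClient` exists and the range below was previously written):
+ *
+ * ```js
+ * // Clear the first 512-byte page; offset and count must be 512-byte aligned
+ * await pageBlobClient.clearPages(0, 512);
+ * ```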
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + async clearPages(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
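+ *
+ * A rough sketch (assumes `pageBlobClient` already exists and has been written to; sizes illustrative):
+ *
+ * ```js
+ * const snapshotResponse = await pageBlobClient.createSnapshot();
+ * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 0, 512); // modify after the snapshot
+ * const rangesDiff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshotResponse.snapshot);
+ * for (const range of rangesDiff.pageRange || []) {
+ *   console.log(`Changed range: offset ${range.offset}, count ${range.count}`);
+ * }
+ * ```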
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + var _a; + const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshot, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(result); + }); + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, + leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ + offset: offset, + count: count, + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. 
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
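+ *
+ * A rough sketch for the managed-disk scenario (assumes `prevSnapshotUrl` is a full snapshot URL
+ * obtained from the managed disk incremental snapshot APIs; sizes illustrative):
+ *
+ * ```js
+ * const rangesDiff = await pageBlobClient.getPageRangesDiffForManagedDisks(0, 4 * 1024 * 1024, prevSnapshotUrl);
+ * for (const range of rangesDiff.pageRange || []) {
+ *   console.log(`Changed range: offset ${range.offset}, count ${range.count}`);
+ * }
+ * ```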
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. 
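+ *
+ * A rough sketch (client names and snapshot value are illustrative; the source snapshot must be
+ * publicly accessible or carry a SAS token):
+ *
+ * ```js
+ * const copySource = `${sourcePageBlobClient.url}?snapshot=${snapshotTimestamp}`;
+ * const copyResponse = await destinationPageBlobClient.startCopyIncremental(copySource);
+ * console.log(copyResponse.copyStatus);
+ * ```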
+ * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. 
validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
+ if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. 
+ */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + }); + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + }); + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. 
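+ *
+ * Roughly, each sub request is appended to the multipart body in the following shape
+ * (boundary GUID and request line are illustrative):
+ *
+ *   --batch_<GUID>
+ *   Content-Type: application/http
+ *   Content-Transfer-Encoding: binary
+ *   Content-ID: <operation index>
+ *
+ *   DELETE /containername/blobname HTTP/1.1
+ *   <sub request headers>
+ *
+ * and the whole body is terminated with `--batch_<GUID>--`.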
+ */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreUtil.randomUUID(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const corePipeline = coreRestPipeline.createEmptyPipeline(); + corePipeline.addPolicy(coreClient.serializationPolicy({ + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + const pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url 
for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: coreRestPipeline.createHttpHeaders(), + }; + }, + }; +} +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } + } + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = storageClientContext.container; + } + else { + this.serviceOrContainerContext = storageClientContext.service; + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + }); + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = this.storageClientContext.container; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. 
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); + }); + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + async createIfNotExists(options = {}) { + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } + } + }); + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. 
The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async deleteIfExists(options = {}) { + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
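+ *
+ * A minimal sketch (metadata keys and values are illustrative; keys are stored lowercase by the service):
+ *
+ * ```js
+ * // Replaces any existing metadata on the container
+ * await containerClient.setMetadata({ project: "demo", owner: "team-a" });
+ * ```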
+ */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + }); + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. 
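+ *
+ * Example usage (a minimal sketch; the policy id, dates and permission string are placeholder values):
+ *
+ * ```js
+ * const now = new Date();
+ * await containerClient.setAccessPolicy("container", [
+ *   {
+ *     id: "sample-read-list-policy",
+ *     accessPolicy: {
+ *       startsOn: now,
+ *       expiresOn: new Date(now.getTime() + 24 * 60 * 60 * 1000),
+ *       permissions: "rl", // read + list
+ *     },
+ *   },
+ * ]);
+ * ```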
+ */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return assertResponse(await this.containerContext.setAccessPolicy({ + abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + }); + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. 
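+ *
+ * Example usage (a minimal sketch; the blob name is a placeholder):
+ *
+ * ```js
+ * // Deletes the blob and, via `deleteSnapshots: "include"`, any snapshots it has.
+ * await containerClient.deleteBlob("my-blob.txt", { deleteSnapshots: "include" });
+ * ```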
+ */ + async deleteBlob(blobName, options = {}) { + return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return blobClient.delete(updatedOptions); + }); + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + }); + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. 
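+ *
+ * A minimal sketch of the marker loop described above, using "/" as the delimiter
+ * (this segment-level method is normally driven for you by {@link ContainerClient.listBlobsByHierarchy}):
+ *
+ * ```js
+ * let marker;
+ * do {
+ *   const segmentResponse = await containerClient.listBlobHierarchySegment("/", marker);
+ *   for (const blob of segmentResponse.segment.blobItems) {
+ *     console.log(`Blob: ${blob.name}`);
+ *   }
+ *   marker = segmentResponse.continuationToken;
+ * } while (marker);
+ * ```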
+ */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; + }); + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsFlatSegmentResponse = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter_1) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsHierarchySegmentResponse = _c; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + }); + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. 
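+ *
+ * Example usage (a minimal sketch):
+ *
+ * ```js
+ * const accountInfo = await containerClient.getAccountInfo();
+ * console.log(`SKU: ${accountInfo.skuName}, account kind: ${accountInfo.accountKind}`);
+ * ```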
+ */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.pathname.split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
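+ *
+ * A minimal sketch (assumes the client was constructed with a `StorageSharedKeyCredential`;
+ * `ContainerSASPermissions` comes from this package and the expiry shown is a placeholder):
+ *
+ * ```js
+ * const stringToSign = containerClient.generateSasStringToSign({
+ *   permissions: ContainerSASPermissions.parse("r"),
+ *   expiresOn: new Date(Date.now() + 60 * 60 * 1000),
+ * });
+ * ```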
+ */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; + } + /** + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve) => { + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to delete blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. 
+ */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. 
+ * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) + .sasQueryParameters; +} +function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope), + stringToSign: stringToSign, + }; +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. 
Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + }); + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + return containerClient.delete(updatedOptions); + }); + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions, + })); + return { containerClient, containerUndeleteResponse }; + }); + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. 
+ async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); + return { containerClient, containerRenameResponse }; + }); + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + async getStatistics(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. 
+ */ + async getAccountInfo(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? 
void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + }); + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. 
+ * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + return generateAccountSASQueryParametersInternal(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +exports.KnownEncryptionAlgorithmType = void 0; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); + +Object.defineProperty(exports, "RestError", ({ + enumerable: true, + get: function () { return coreRestPipeline.RestError; } +})); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 2856: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(84492).Writable) +const inherits = (__nccwpck_require__(47261).inherits) + 
+const StreamSearch = __nccwpck_require__(88534) + +const PartStream = __nccwpck_require__(38710) +const HeaderParser = __nccwpck_require__(90333) + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + 
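For orientation (editorial note, not part of this diff): the `Dicer` writable stream defined above splits a multipart body on its boundary and surfaces each part as a readable stream that emits `header`, `data`, and `end`. A minimal sketch, with a made-up boundary and payload:

```js
// Illustrative sketch only; "Dicer" refers to the constructor defined above
// (also re-exported by the bundled busboy as module.exports.Dicer).
const boundary = "AaB03x";
const dicer = new Dicer({ boundary });

dicer.on("part", (part) => {
  part.on("header", (header) => console.log("headers:", header));
  part.on("data", (chunk) => console.log("data: %d bytes", chunk.length));
  part.on("end", () => console.log("part finished"));
});
dicer.on("finish", () => console.log("multipart body fully parsed"));

dicer.write(
  `--${boundary}\r\n` +
    'Content-Disposition: form-data; name="field1"\r\n' +
    "\r\n" +
    "hello\r\n" +
    `--${boundary}--\r\n`
);
dicer.end();
```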
+Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer + + +/***/ }), + +/***/ 90333: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = (__nccwpck_require__(15673).EventEmitter) +const inherits = (__nccwpck_require__(47261).inherits) +const getLimit = __nccwpck_require__(49692) + +const StreamSearch = __nccwpck_require__(88534) + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser + + +/***/ }), + +/***/ 38710: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const inherits = (__nccwpck_require__(47261).inherits) +const ReadableStream = (__nccwpck_require__(84492).Readable) + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream + + +/***/ }), + +/***/ 88534: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = (__nccwpck_require__(15673).EventEmitter) +const inherits = (__nccwpck_require__(47261).inherits) + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
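// Worked example (editorial note, not part of the bundled module): for the
// needle "abcab" (length 5), the loop below sets occ['a'] = 1, occ['b'] = 3 and
// occ['c'] = 2 while every other byte keeps the default shift of 5, i.e. on a
// mismatch the search skips ahead by the distance from that byte's last
// occurrence (final needle position excluded) to the end of the needle.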
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH + + +/***/ }), + +/***/ 33438: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(84492).Writable) +const { inherits } = __nccwpck_require__(47261) +const Dicer = __nccwpck_require__(2856) + +const MultipartParser = __nccwpck_require__(90415) +const UrlencodedParser = __nccwpck_require__(16780) +const parseParams = __nccwpck_require__(34426) + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer + + +/***/ }), + +/***/ 
90415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = __nccwpck_require__(84492) +const { inherits } = __nccwpck_require__(47261) + +const Dicer = __nccwpck_require__(2856) + +const parseParams = __nccwpck_require__(34426) +const decodeText = __nccwpck_require__(99136) +const basename = __nccwpck_require__(60496) +const getLimit = __nccwpck_require__(49692) + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) 
{ + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (!boy._events.file) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart + + +/***/ }), + +/***/ 16780: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Decoder = __nccwpck_require__(89730) +const decodeText = __nccwpck_require__(99136) +const getLimit = __nccwpck_require__(49692) + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, 
idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded + + +/***/ }), + +/***/ 89730: +/***/ ((module) => { + +"use strict"; + + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder + + +/***/ }), + +/***/ 60496: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? 
'' : path) +} + + +/***/ }), + +/***/ 99136: +/***/ ((module) => { + +"use strict"; + + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function decodeText (text, textEncoding, destEncoding) { + if (text) { + if (textDecoders.has(destEncoding)) { + try { + return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) + } catch (e) { } + } else { + try { + textDecoders.set(destEncoding, new TextDecoder(destEncoding)) + return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding)) + } catch (e) { } + } + } + return text +} + +module.exports = decodeText + + +/***/ }), + +/***/ 49692: +/***/ ((module) => { + +"use strict"; + + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} + + +/***/ }), + +/***/ 34426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const decodeText = __nccwpck_require__(99136) + +const RE_ENCODED = /%([a-fA-F0-9]{2})/g + +function encodedReplacer (match, byte) { + return String.fromCharCode(parseInt(byte, 16)) +} + +function parseParams (str) { + const res = [] + let state = 'key' + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + + for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = 'key' + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === 'charset' || state === 'lang') && char === "'") { + if (state === 'charset') { + state = 'lang' + charset = tmp.substring(1) + } else { state = 'value' } + tmp = '' + continue + } else if (state === 'key' && + (char === '*' || char === '=') && + res.length) { + if (char === '*') { state = 'charset' } else { state = 'value' } + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = 'key' + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams + + +/***/ }), + +/***/ 46412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(61546); +const extend = __nccwpck_require__(38171); +const resource_stream_1 = __nccwpck_require__(72199); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. 
+ */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. + if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. 
+ * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 72199: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(12781); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. 
+ try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 3497: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(12781); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. + * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. 
You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 19203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. + if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. 
+ * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 44458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(33542)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(29411)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(83424)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(64051)); + +var _nil = _interopRequireDefault(__nccwpck_require__(46570)); + +var _version = _interopRequireDefault(__nccwpck_require__(35611)); + +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + +var _stringify = 
_interopRequireDefault(__nccwpck_require__(32122)); + +var _parse = _interopRequireDefault(__nccwpck_require__(99645)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 84953: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 46570: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 99645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 94323: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 91430: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 77416: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 32122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 33542: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(91430)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 29411: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(74624)); + +var _md = _interopRequireDefault(__nccwpck_require__(84953)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 74624: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); + +var _parse = _interopRequireDefault(__nccwpck_require__(99645)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 83424: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(91430)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(32122)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 64051: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(74624)); + +var _sha = _interopRequireDefault(__nccwpck_require__(77416)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 20937: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(94323)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 35611: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(20937)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 40334: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 76762: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var universalUserAgent = __nccwpck_require__(45030); +var beforeAfterHook = __nccwpck_require__(83682); +var request = __nccwpck_require__(36234); +var graphql = __nccwpck_require__(88467); +var authToken = __nccwpck_require__(40334); + +function _objectWithoutPropertiesLoose(source, excluded) { + if (source == null) return {}; + var target = {}; + var sourceKeys = Object.keys(source); + var key, i; + + for (i = 0; i < sourceKeys.length; i++) { + key = sourceKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + target[key] = source[key]; + } + + return target; +} + +function _objectWithoutProperties(source, excluded) { + if (source == null) return {}; + + var target = _objectWithoutPropertiesLoose(source, excluded); + + var key, i; + + if (Object.getOwnPropertySymbols) { + var sourceSymbolKeys = Object.getOwnPropertySymbols(source); + + for (i = 0; i < sourceSymbolKeys.length; i++) { + key = sourceSymbolKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; + target[key] = source[key]; + } + } + + return target; +} + +const VERSION = "3.6.0"; + +const _excluded = ["authStrategy"]; +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, 
options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { + authStrategy + } = options, + otherOptions = _objectWithoutProperties(options, _excluded); + + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 59440: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var isPlainObject = __nccwpck_require__(63287); +var universalUserAgent = __nccwpck_require__(45030); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging + + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? 
{ + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.12"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. + +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 88467: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var request = __nccwpck_require__(36234); +var universalUserAgent = __nccwpck_require__(45030); + +const VERSION = "4.8.0"; + +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); +} + +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. + + this.errors = response.errors; + this.data = response.data; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + +} + +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + } + } + + const parsedOptions = typeof query === "string" ? 
Object.assign({ + query + }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + + if (!result.variables) { + result.variables = {}; + } + + result.variables[key] = parsedOptions[key]; + return result; + }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + + return request(requestOptions).then(response => { + if (response.data.errors) { + const headers = {}; + + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + + throw new GraphqlResponseError(requestOptions, headers, response.data); + } + + return response.data.data; + }); +} + +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} + +exports.GraphqlResponseError = GraphqlResponseError; +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 64193: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const VERSION = "2.21.3"; + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + enumerableOnly && (symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + })), keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = null != arguments[i] ? arguments[i] : {}; + i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { + _defineProperty(target, key, source[key]); + }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. 
+ * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check whether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return _objectSpread2(_objectSpread2({}, response), {}, { + data: [] + }); + } + + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) return { + done: true + }; + + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '<https://...>; rel="next", <https://...>; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + + }) + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} + +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } + + let earlyExit = false; + + function done() { + earlyExit = true; + } + + results = results.concat(mapFn ?
mapFn(result.value, done) : result.value.data); + + if (earlyExit) { + return results; + } + + return gather(octokit, results, iterator, mapFn); + }); +} + +const composePaginateRest = Object.assign(paginate, { + iterator +}); + +const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET 
/repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET 
/repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; +exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 83044: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { 
+ for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +const Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], + addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], + deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], + disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + 
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], + enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], + getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], + getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], + getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], + getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], + getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { + renamed: ["actions", "getGithubActionsPermissionsRepository"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], + listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], + listLabelsForSelfHostedRunnerForRepo: ["GET 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], + removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], + setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], + setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], + setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"], + setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], + setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], + setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], + setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET 
/repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { + renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] + }], + addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET 
/user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], listWebhookDeliveries: ["GET /app/hook/deliveries"], redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { @@ -25705,6 +54922,32 @@ module.exports = { /***/ }), +/***/ 4958: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + /***/ 38280: /***/ ((__unused_webpack_module, exports) => { @@ -26225,6 +55468,7 @@ function getPositionFromMatch(match) { //parse Empty Node as self closing node const buildFromOrderedJs = __nccwpck_require__(72462); +const getIgnoreAttributesFn = __nccwpck_require__(4958) const defaultOptions = { attributeNamePrefix: '@_', @@ -26262,11 +55506,12 @@ const defaultOptions = { function Builder(options) { this.options = Object.assign({}, defaultOptions, options); - if (this.options.ignoreAttributes || this.options.attributesGroupName) { + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { this.isAttribute = function(/*a*/) { return false; }; } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) this.attrPrefixLen = this.options.attributeNamePrefix.length; this.isAttribute = isAttribute; } @@ -26295,13 +55540,14 @@ Builder.prototype.build = function(jObj) { [this.options.arrayNodeName] : jObj } } - return this.j2x(jObj, 0).val; + return this.j2x(jObj, 0, []).val; } }; -Builder.prototype.j2x = function(jObj, level) { +Builder.prototype.j2x = function(jObj, level, ajPath) { let attrStr = ''; let val = ''; + const jPath = ajPath.join('.') for (let key in jObj) { if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; if (typeof jObj[key] === 'undefined') { @@ -26324,9 +55570,9 @@ Builder.prototype.j2x = function(jObj, level) { } else if (typeof jObj[key] !== 'object') { //premitive type const attr = this.isAttribute(key); - if (attr) { + if (attr && !this.ignoreAttributesFn(attr, jPath)) { attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); - }else { + } else if (!attr) { //tag value if (key === this.options.textNodeName) { let newval = this.options.tagValueProcessor(key, '' + jObj[key]); @@ -26339,6 +55585,7 @@ Builder.prototype.j2x = function(jObj, level) { //repeated nodes const arrLen = jObj[key].length; let listTagVal = ""; + let listTagAttr = ""; for (let j = 0; j < arrLen; j++) { const item = jObj[key][j]; if (typeof item === 'undefined') { @@ -26348,17 +55595,27 @@ Builder.prototype.j2x = function(jObj, level) { else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; } else if (typeof item === 'object') { - if(this.options.oneListGroup ){ - 
listTagVal += this.j2x(item, level + 1).val; + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } }else{ - listTagVal += this.processTextOrObjNode(item, key, level) + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) } } else { - listTagVal += this.buildTextValNode(item, key, '', level); + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } } } if(this.options.oneListGroup){ - listTagVal = this.buildObjectNode(listTagVal, key, '', level); + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); } val += listTagVal; } else { @@ -26370,7 +55627,7 @@ Builder.prototype.j2x = function(jObj, level) { attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); } } else { - val += this.processTextOrObjNode(jObj[key], key, level) + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) } } } @@ -26385,8 +55642,8 @@ Builder.prototype.buildAttrPairStr = function(attrName, val){ } else return ' ' + attrName + '="' + val + '"'; } -function processTextOrObjNode (object, key, level) { - const result = this.j2x(object, level + 1); +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); } else { @@ -26862,6 +56119,7 @@ const util = __nccwpck_require__(38280); const xmlNode = __nccwpck_require__(7462); const readDocType = __nccwpck_require__(6072); const toNumber = __nccwpck_require__(14526); +const getIgnoreAttributesFn = __nccwpck_require__(4958) // const regx = // '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' @@ -26910,6 +56168,7 @@ class OrderedObjParser{ this.readStopNodeData = readStopNodeData; this.saveTextToParentTag = saveTextToParentTag; this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) } } @@ -26982,7 +56241,7 @@ function resolveNameSpace(tagname) { const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); function buildAttributesMap(attrStr, jPath, tagName) { - if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { // attrStr = attrStr.replace(/\r?\n/g, ' '); //attrStr = attrStr || attrStr.trim(); @@ -26991,6 +56250,9 @@ function buildAttributesMap(attrStr, jPath, tagName) { const attrs = {}; for (let i = 0; i < len; i++) { const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } let oldVal = matches[i][4]; let aName = this.options.attributeNamePrefix + attrName; if (attrName.length) { @@ -51183,6 +80445,434 @@ module.exports.toUnicode = function(domain_name, useSTD3) { module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; +/***/ }), + +/***/ 4351: +/***/ ((module) => { + +/****************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? 
target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + function next() { + while (env.stack.length) { + var rec = env.stack.pop(); + try { + var result = rec.dispose && rec.dispose.call(rec.value); + if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + catch (e) { + fail(e); + } + } + if (env.hasError) throw env.error; + } + return next(); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); +}); + + /***/ }), /***/ 74294: @@ -57797,6 +87487,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 14462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 
'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 48045: @@ -58629,6 +88445,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045) const { Blob } = __nccwpck_require__(14300) const nodeUtil = __nccwpck_require__(73837) const { stringify } = __nccwpck_require__(63477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(14462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -58838,6 +88655,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -59109,6 +88935,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -65756,14 +95583,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398 const assert = __nccwpck_require__(39491) const { isUint8Array } = __nccwpck_require__(29830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -66291,66 +96122,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. 
- const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
- // get the strongest algorithm
- const strongest = list[0].algo
- // get all entries that use the strongest algorithm; ignore weaker
- const metadata = list.filter((item) => item.algo === strongest)
+ const strongest = getStrongestMetadata(parsedMetadata)
+ const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
 
- // 5. For each item in metadata:
+ // 6. For each item in metadata:
 for (const item of metadata) {
 // 1. Let algorithm be the alg component of item.
 const algorithm = item.algo
 
 // 2. Let expectedValue be the val component of item.
- let expectedValue = item.hash
+ const expectedValue = item.hash
 
 // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
 // "be liberal with padding". This is annoying, and it's not even in the spec.
 
- if (expectedValue.endsWith('==')) {
- expectedValue = expectedValue.slice(0, -2)
- }
-
 // 3. Let actualValue be the result of applying algorithm to bytes.
 let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
- if (actualValue.endsWith('==')) {
- actualValue = actualValue.slice(0, -2)
+ if (actualValue[actualValue.length - 1] === '=') {
+ if (actualValue[actualValue.length - 2] === '=') {
+ actualValue = actualValue.slice(0, -2)
+ } else {
+ actualValue = actualValue.slice(0, -1)
+ }
 }
 
 // 4. If actualValue is a case-sensitive match for expectedValue,
 // return true.
- if (actualValue === expectedValue) {
- return true
- }
-
- let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
- if (actualBase64URL.endsWith('==')) {
- actualBase64URL = actualBase64URL.slice(0, -2)
- }
-
- if (actualBase64URL === expectedValue) {
+ if (compareBase64Mixed(actualValue, expectedValue)) {
 return true
 }
 }
 
- // 6. Return false.
+ // 7. Return false.
 return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -66364,8 +96185,6 @@ function parseMetadata (metadata) {
 // 2. Let empty be equal to true.
 let empty = true
 
- const supportedHashes = crypto.getHashes()
-
 // 3. For each token returned by splitting metadata on spaces:
 for (const token of metadata.split(' ')) {
 // 1. Set empty to false.
@@ -66375,7 +96194,11 @@ function parseMetadata (metadata) {
 const parsedToken = parseHashWithOptions.exec(token)
 
 // 3. If token does not parse, continue to the next token.
- if (parsedToken === null || parsedToken.groups === undefined) {
+ if (
+ parsedToken === null ||
+ parsedToken.groups === undefined ||
+ parsedToken.groups.algo === undefined
+ ) {
 // Note: Chromium blocks the request at this point, but Firefox
 // gives a warning that an invalid integrity was given. The
 // correct behavior is to ignore these, and subsequently not
@@ -66384,11 +96207,11 @@ function parseMetadata (metadata) {
 }
 
 // 4. Let algorithm be the hash-algo component of token.
- const algorithm = parsedToken.groups.algo
+ const algorithm = parsedToken.groups.algo.toLowerCase()
 
 // 5. If algorithm is a hash function recognized by the user
 // agent, add the parsed token to result.
- if (supportedHashes.includes(algorithm.toLowerCase())) { + if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } @@ -66401,6 +96224,82 @@ function parseMetadata (metadata) { return result } +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. + * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -66816,7 +96715,8 @@ module.exports = { urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, - normalizeMethodRecord + normalizeMethodRecord, + parseMetadata } @@ -68903,12 +98803,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -76063,1881 
+105968,11160 @@ module.exports.parseURL = function (input, options) { /***/ }), -/***/ 83185: -/***/ ((module) => { +/***/ 83185: +/***/ ((module) => { + +"use strict"; + + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + + + +/***/ }), + +/***/ 62940: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 15185: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. + this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. + // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + +/***/ }), + +/***/ 17573: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.configAuthentication = void 0; +const fs = __importStar(__nccwpck_require__(57147)); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const core = __importStar(__nccwpck_require__(42186)); +const github = __importStar(__nccwpck_require__(95438)); +function configAuthentication(registryUrl, alwaysAuth) { + const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); + if (!registryUrl.endsWith('/')) { + registryUrl += '/'; + } + writeRegistryToFile(registryUrl, npmrc, alwaysAuth); +} +exports.configAuthentication = configAuthentication; +function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { + let scope = core.getInput('scope'); + if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { + scope = github.context.repo.owner; + } + if (scope && scope[0] != '@') { + scope = '@' + scope; + } + if (scope) { + scope = scope.toLowerCase() + ':'; + } + core.debug(`Setting auth in ${fileLocation}`); + let newContents = ''; + if (fs.existsSync(fileLocation)) { + const curContents = fs.readFileSync(fileLocation, 'utf8'); + curContents.split(os.EOL).forEach((line) => { + // Add current contents unless they are setting the registry + if (!line.toLowerCase().startsWith(`${scope}registry`)) { + newContents += line + os.EOL; + } + }); + } + // Remove http: or https: from front of registry. + const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; + const registryString = `${scope}registry=${registryUrl}`; + const alwaysAuthString = `always-auth=${alwaysAuth}`; + newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; + fs.writeFileSync(fileLocation, newContents); + core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); + // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it + core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); +} + + +/***/ }), + +/***/ 19517: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restoreCache = void 0; +const cache = __importStar(__nccwpck_require__(27799)); +const core = __importStar(__nccwpck_require__(42186)); +const glob = __importStar(__nccwpck_require__(28090)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const os_1 = __importDefault(__nccwpck_require__(22037)); +const constants_1 = __nccwpck_require__(69042); +const cache_utils_1 = __nccwpck_require__(41678); +const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const packageManagerInfo = yield (0, cache_utils_1.getPackageManagerInfo)(packageManager); + if (!packageManagerInfo) { + throw new Error(`Caching for '${packageManager}' is not supported`); + } + const platform = process.env.RUNNER_OS; + const arch = os_1.default.arch(); + const cachePaths = yield (0, cache_utils_1.getCacheDirectories)(packageManagerInfo, cacheDependencyPath); + core.saveState(constants_1.State.CachePaths, cachePaths); + const lockFilePath = cacheDependencyPath + ? 
cacheDependencyPath + : findLockFile(packageManagerInfo); + const fileHash = yield glob.hashFiles(lockFilePath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + const keyPrefix = `node-cache-${platform}-${arch}-${packageManager}`; + const primaryKey = `${keyPrefix}-${fileHash}`; + core.debug(`primary key is ${primaryKey}`); + core.saveState(constants_1.State.CachePrimaryKey, primaryKey); + const isManagedByYarnBerry = yield (0, cache_utils_1.repoHasYarnBerryManagedDependencies)(packageManagerInfo, cacheDependencyPath); + let cacheKey; + if (isManagedByYarnBerry) { + core.info('All dependencies are managed locally by yarn3, the previous cache can be used'); + cacheKey = yield cache.restoreCache(cachePaths, primaryKey, [keyPrefix]); + } + else { + cacheKey = yield cache.restoreCache(cachePaths, primaryKey); + } + core.setOutput('cache-hit', Boolean(cacheKey)); + if (!cacheKey) { + core.info(`${packageManager} cache is not found`); + return; + } + core.saveState(constants_1.State.CacheMatchedKey, cacheKey); + core.info(`Cache restored from key: ${cacheKey}`); +}); +exports.restoreCache = restoreCache; +const findLockFile = (packageManager) => { + const lockFiles = packageManager.lockFilePatterns; + const workspace = process.env.GITHUB_WORKSPACE; + const rootContent = fs_1.default.readdirSync(workspace); + const lockFile = lockFiles.find(item => rootContent.includes(item)); + if (!lockFile) { + throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); + } + return path_1.default.join(workspace, lockFile); +}; + + +/***/ }), + +/***/ 41678: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const exec = __importStar(__nccwpck_require__(71514)); +const cache = __importStar(__nccwpck_require__(27799)); +const glob = __importStar(__nccwpck_require__(28090)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const util_1 = __nccwpck_require__(92629); +exports.supportedPackageManagers = { + npm: { + name: 'npm', + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('npm config get cache', 'Could not get npm cache folder path') + }, + pnpm: { + name: 'pnpm', + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('pnpm store path --silent', 'Could not get pnpm cache folder path') + }, + yarn: { + name: 'yarn', + lockFilePatterns: ['yarn.lock'], + getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { + const yarnVersion = yield (0, exports.getCommandOutputNotEmpty)(`yarn --version`, 'Could not retrieve version of yarn', projectDir); + core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); + const stdOut = yarnVersion.startsWith('1.') + ? yield (0, exports.getCommandOutput)('yarn cache dir', projectDir) + : yield (0, exports.getCommandOutput)('yarn config get cacheFolder', projectDir); + if (!stdOut) { + throw new Error(`Could not get yarn cache folder path for ${projectDir}`); + } + return stdOut; + }) + } +}; +const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); + if (exitCode) { + stderr = !stderr.trim() + ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +exports.getCommandOutput = getCommandOutput; +const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = (0, exports.getCommandOutput)(toolCommand, cwd); + if (!stdOut) { + throw new Error(error); + } + return stdOut; +}); +exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; +const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + return exports.supportedPackageManagers.yarn; + } + else { + return null; + } +}); +exports.getPackageManagerInfo = getPackageManagerInfo; +/** + * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` + * - first through `getCacheDirectories` + * - second from `repoHasYarn3ManagedCache` + * + * it contains expensive IO operation and thus should be memoized + */ +let projectDirectoriesMemoized = null; +/** + * unit test must reset memoized variables + */ +const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); +exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +/** + * Expands (converts) the string input `cache-dependency-path` to list of directories that + * may be project roots + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of directories and possible + */ +const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (projectDirectoriesMemoized !== null) { + return projectDirectoriesMemoized; + } + const globber = yield glob.create(cacheDependencyPath); + const cacheDependenciesPaths = yield globber.glob(); + const existingDirectories = cacheDependenciesPaths + .map(path_1.default.dirname) + .filter((0, util_1.unique)()) + .map(dirName => fs_1.default.realpathSync(dirName)) + .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); + if (!existingDirectories.length) + core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); + projectDirectoriesMemoized = existingDirectories; + return existingDirectories; +}); +/** + * Finds the cache directories configured for the repo if cache-dependency-path is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); + const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); + 
core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); + return cacheFolderPath; + }))); + // uniq in order to do not cache the same directories twice + return cacheFoldersPaths.filter((0, util_1.unique)()); +}); +/** + * Finds the cache directories configured for the repo ignoring cache-dependency-path + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @return list of files on which the cache depends + */ +const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); + core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); + return [cacheFolderPath]; +}); +/** + * A function to find the cache directories configured for the repo + * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project + // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 + if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { + return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); + } + return getCacheDirectoriesForRootProject(packageManagerInfo); +}); +exports.getCacheDirectories = getCacheDirectories; +/** + * A function to check if the directory is a yarn project configured to manage + * obsolete dependencies in the local cache + * @param directory - a path to the folder + * @return - true if the directory's project is yarn managed + * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false + * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false + * - if local cache is not explicitly enabled (not yarn3), return false + * - return true otherwise + */ +const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { + const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; + core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); + // if .yarn/cache directory exists the cache is managed by version control system + const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); + if (fs_1.default.existsSync(yarnCacheFile) && + fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { + core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); + return Promise.resolve(false); + } + // NOTE: yarn1 returns 'undefined' with return code = 0 + const enableGlobalCache = yield (0, exports.getCommandOutput)('yarn config get enableGlobalCache', workDir); + // only local cache is not managed by yarn + const managed = enableGlobalCache.includes('false'); + if (managed) { + core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); + return true; + } + else { + 
core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`); + return false; + } +}); +/** + * A function to report the repo contains Yarn managed projects + * @param packageManagerInfo - used to make sure current package manager is yarn + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return - true if all project directories configured to be Yarn managed + */ +const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManagerInfo.name !== 'yarn') + return false; + const yarnDirs = cacheDependencyPath + ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) + : ['']; + const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); + return isManagedList.every(Boolean); +}); +exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === 'GITHUB.COM'; + const isGitHubEnterpriseCloudHost = hostname.endsWith('.GHE.COM'); + const isLocalHost = hostname.endsWith('.LOCALHOST'); + return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (cache.isFeatureAvailable()) + return true; + if (isGhes()) { + core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + return false; + } + core.warning('The runner was not able to contact the cache service. Caching will be skipped'); + return false; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), + +/***/ 69042: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Outputs = exports.State = exports.LockType = void 0; +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType || (exports.LockType = LockType = {})); +var State; +(function (State) { + State["CachePackageManager"] = "SETUP_NODE_CACHE_PACKAGE_MANAGER"; + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; + State["CachePaths"] = "CACHE_PATHS"; +})(State || (exports.State = State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs || (exports.Outputs = Outputs = {})); + + +/***/ }), + +/***/ 70957: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tc = __importStar(__nccwpck_require__(27784)); +const semver_1 = __importDefault(__nccwpck_require__(11383)); +const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); +class BasePrereleaseNodejs extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + findVersionInHostedToolCacheDirectory() { + let toolPath = ''; + const localVersionPaths = tc + .findAllVersions('node', this.nodeInfo.arch) + .filter(i => { + const prerelease = semver_1.default.prerelease(i, {}); + if (!prerelease) { + return false; + } + return prerelease[0].toString().includes(this.distribution); + }); + localVersionPaths.sort(semver_1.default.rcompare); + const localVersion = this.evaluateVersions(localVersionPaths); + if (localVersion) { + toolPath = tc.find('node', localVersion, this.nodeInfo.arch); + } + return toolPath; + } + validRange(versionSpec) { + let range; + const [raw, prerelease] = this.splitVersionSpec(versionSpec); + const isValidVersion = semver_1.default.valid(raw); + const rawVersion = (isValidVersion ? raw : semver_1.default.coerce(raw)); + if (prerelease !== this.distribution) { + range = versionSpec; + } + else { + range = `${semver_1.default.validRange(`^${rawVersion}-${this.distribution}`)}-0`; + } + return { range, options: { includePrerelease: !isValidVersion } }; + } + splitVersionSpec(versionSpec) { + return versionSpec.split(/-(.*)/s); + } +} +exports["default"] = BasePrereleaseNodejs; + + +/***/ }), + +/***/ 80007: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const uuid_1 = __nccwpck_require__(75840); +const tc = __importStar(__nccwpck_require__(27784)); +const hc = __importStar(__nccwpck_require__(96255)); +const core = __importStar(__nccwpck_require__(42186)); +const io = __importStar(__nccwpck_require__(47351)); +const semver_1 = __importDefault(__nccwpck_require__(11383)); +const assert = __importStar(__nccwpck_require__(39491)); +const path = __importStar(__nccwpck_require__(71017)); +const os_1 = __importDefault(__nccwpck_require__(22037)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +class BaseDistribution { + constructor(nodeInfo) { + this.nodeInfo = nodeInfo; + this.osPlat = os_1.default.platform(); + this.httpClient = new hc.HttpClient('setup-node', [], { + allowRetries: true, + maxRetries: 3 + }); + } + setupNodeJs() { + return __awaiter(this, void 0, void 0, function* () { + let nodeJsVersions; + if (this.nodeInfo.checkLatest) { + const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); + this.nodeInfo.versionSpec = evaluatedVersion; + } + let toolPath = this.findVersionInHostedToolCacheDirectory(); + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + } + else { + const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); + const toolName = this.getNodejsDistInfo(evaluatedVersion); + toolPath = yield this.downloadNodejs(toolName); + } + if (this.osPlat != 'win32') { + toolPath = path.join(toolPath, 'bin'); + } + core.addPath(toolPath); + }); + } + findVersionInDist(nodeJsVersions) { + return __awaiter(this, void 0, void 0, function* () { + if (!nodeJsVersions) { + nodeJsVersions = yield this.getNodeJsVersions(); + } + const versions = this.filterVersions(nodeJsVersions); + const evaluatedVersion = this.evaluateVersions(versions); + if (!evaluatedVersion) { + throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); + } + return evaluatedVersion; + }); + } + evaluateVersions(versions) { + let version = ''; + const { range, options } = this.validRange(this.nodeInfo.versionSpec); + core.debug(`evaluating ${versions.length} versions`); + for (const potential of versions) { + const satisfied = semver_1.default.satisfies(potential, range, options); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; + } + findVersionInHostedToolCacheDirectory() { + return tc.find('node', this.nodeInfo.versionSpec, this.translateArchToDistUrl(this.nodeInfo.arch)); + } + getNodeJsVersions() { + return __awaiter(this, void 0, void 0, function* () { + const initialUrl = this.getDistributionUrl(); + const dataUrl = `${initialUrl}/index.json`; + const response = yield 
this.httpClient.getJson(dataUrl); + return response.result || []; + }); + } + getNodejsDistInfo(version) { + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + version = semver_1.default.clean(version) || ''; + const fileName = this.osPlat == 'win32' + ? `node-v${version}-win-${osArch}` + : `node-v${version}-${this.osPlat}-${osArch}`; + const urlFileName = this.osPlat == 'win32' + ? this.nodeInfo.arch === 'arm64' + ? `${fileName}.zip` + : `${fileName}.7z` + : `${fileName}.tar.gz`; + const initialUrl = this.getDistributionUrl(); + const url = `${initialUrl}/v${version}/${urlFileName}`; + return { + downloadUrl: url, + resolvedVersion: version, + arch: osArch, + fileName: fileName + }; + } + downloadNodejs(info) { + return __awaiter(this, void 0, void 0, function* () { + let downloadPath = ''; + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + try { + downloadPath = yield tc.downloadTool(info.downloadUrl); + } + catch (err) { + if (err instanceof tc.HTTPError && + err.httpStatusCode == 404 && + this.osPlat == 'win32') { + return yield this.acquireWindowsNodeFromFallbackLocation(info.resolvedVersion, info.arch); + } + throw err; + } + const toolPath = yield this.extractArchive(downloadPath, info, true); + core.info('Done'); + return toolPath; + }); + } + validRange(versionSpec) { + var _a; + let options; + const c = semver_1.default.clean(versionSpec) || ''; + const valid = (_a = semver_1.default.valid(c)) !== null && _a !== void 0 ? _a : versionSpec; + return { range: valid, options }; + } + acquireWindowsNodeFromFallbackLocation(version_1) { + return __awaiter(this, arguments, void 0, function* (version, arch = os_1.default.arch()) { + const initialUrl = this.getDistributionUrl(); + const osArch = this.translateArchToDistUrl(arch); + // Create temporary folder to download to + const tempDownloadFolder = `temp_${(0, uuid_1.v4)()}`; + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + const tempDir = path.join(tempDirectory, tempDownloadFolder); + yield io.mkdirP(tempDir); + let exeUrl; + let libUrl; + try { + exeUrl = `${initialUrl}/v${version}/win-${osArch}/node.exe`; + libUrl = `${initialUrl}/v${version}/win-${osArch}/node.lib`; + core.info(`Downloading only node binary from ${exeUrl}`); + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + exeUrl = `${initialUrl}/v${version}/node.exe`; + libUrl = `${initialUrl}/v${version}/node.lib`; + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + else { + throw err; + } + } + const toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); + return toolPath; + }); + } + extractArchive(downloadPath, info, isOfficialArchive) { + return __awaiter(this, void 0, void 0, function* () { + // + // Extract + // + core.info('Extracting ...'); + let extPath; + info = info || {}; // satisfy compiler, never null when reaches here + if (this.osPlat == 'win32') { + const extension = this.nodeInfo.arch === 'arm64' ? 
'.zip' : '.7z'; + // Rename archive to add extension because after downloading + // archive does not contain extension type and it leads to some issues + // on Windows runners without PowerShell Core. + // + // For default PowerShell Windows it should contain extension type to unpack it. + if (extension === '.zip' && isOfficialArchive) { + const renamedArchive = `${downloadPath}.zip`; + fs_1.default.renameSync(downloadPath, renamedArchive); + extPath = yield tc.extractZip(renamedArchive); + } + else { + const _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); + extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); + } + // 7z extracts to folder matching file name + const nestedPath = path.join(extPath, path.basename(info.fileName, extension)); + if (fs_1.default.existsSync(nestedPath)) { + extPath = nestedPath; + } + } + else { + extPath = yield tc.extractTar(downloadPath, undefined, [ + 'xz', + '--strip', + '1' + ]); + } + // + // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded + // + core.info('Adding to the cache ...'); + const toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); + return toolPath; + }); + } + getDistFileName() { + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + // node offers a json list of versions + let dataFileName; + switch (this.osPlat) { + case 'linux': + dataFileName = `linux-${osArch}`; + break; + case 'darwin': + dataFileName = `osx-${osArch}-tar`; + break; + case 'win32': + if (this.nodeInfo.arch === 'arm64') { + dataFileName = `win-${osArch}-zip`; + } + else { + dataFileName = `win-${osArch}-exe`; + } + break; + default: + throw new Error(`Unexpected OS '${this.osPlat}'`); + } + return dataFileName; + } + filterVersions(nodeJsVersions) { + const versions = []; + const dataFileName = this.getDistFileName(); + nodeJsVersions.forEach((nodeVersion) => { + // ensure this version supports your os and platform + if (nodeVersion.files.indexOf(dataFileName) >= 0) { + versions.push(nodeVersion.version); + } + }); + return versions.sort(semver_1.default.rcompare); + } + translateArchToDistUrl(arch) { + switch (arch) { + case 'arm': + return 'armv7l'; + default: + return arch; + } + } +} +exports["default"] = BaseDistribution; + + +/***/ }), + +/***/ 85617: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getNodejsDistribution = void 0; +const nightly_builds_1 = __importDefault(__nccwpck_require__(57127)); +const official_builds_1 = __importDefault(__nccwpck_require__(77854)); +const rc_builds_1 = __importDefault(__nccwpck_require__(98837)); +const canary_builds_1 = __importDefault(__nccwpck_require__(50969)); +var Distributions; +(function (Distributions) { + Distributions["DEFAULT"] = ""; + Distributions["CANARY"] = "v8-canary"; + Distributions["NIGHTLY"] = "nightly"; + Distributions["RC"] = "rc"; +})(Distributions || (Distributions = {})); +function getNodejsDistribution(installerOptions) { + const versionSpec = installerOptions.versionSpec; + let distribution; + if (versionSpec.includes(Distributions.NIGHTLY)) { + distribution = new nightly_builds_1.default(installerOptions); + } + else if (versionSpec.includes(Distributions.CANARY)) { + distribution = new canary_builds_1.default(installerOptions); + } + else if (versionSpec.includes(Distributions.RC)) { + distribution = new rc_builds_1.default(installerOptions); + } + else { + distribution = new official_builds_1.default(installerOptions); + } + return distribution; +} +exports.getNodejsDistribution = getNodejsDistribution; + + +/***/ }), + +/***/ 57127: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(70957)); +class NightlyNodejs extends base_distribution_prerelease_1.default { + constructor(nodeInfo) { + super(nodeInfo); + this.distribution = 'nightly'; + } + getDistributionUrl() { + return 'https://nodejs.org/download/nightly'; + } +} +exports["default"] = NightlyNodejs; + + +/***/ }), + +/***/ 77854: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(42186)); +const tc = __importStar(__nccwpck_require__(27784)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); +class OfficialBuilds extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + setupNodeJs() { + return __awaiter(this, void 0, void 0, function* () { + var _a; + let manifest; + let nodeJsVersions; + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + if (this.isLtsAlias(this.nodeInfo.versionSpec)) { + core.info('Attempt to resolve LTS alias from manifest...'); + // No try-catch since it's not possible to resolve LTS alias without manifest + manifest = yield this.getManifest(); + this.nodeInfo.versionSpec = this.resolveLtsAliasFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, manifest); + } + if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { + nodeJsVersions = yield this.getNodeJsVersions(); + const versions = this.filterVersions(nodeJsVersions); + this.nodeInfo.versionSpec = this.evaluateVersions(versions); + core.info('getting latest node version...'); + } + if (this.nodeInfo.checkLatest) { + core.info('Attempt to resolve the latest version from manifest...'); + const resolvedVersion = yield this.resolveVersionFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); + if (resolvedVersion) { + this.nodeInfo.versionSpec = resolvedVersion; + core.info(`Resolved as '${resolvedVersion}'`); + } + else { + core.info(`Failed to resolve version ${this.nodeInfo.versionSpec} from manifest`); + } + } + let toolPath = this.findVersionInHostedToolCacheDirectory(); + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + this.addToolPath(toolPath); + return; + } + let downloadPath = ''; + try { + core.info(`Attempting to download ${this.nodeInfo.versionSpec}...`); + const versionInfo = yield this.getInfoFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); + if (versionInfo) { + core.info(`Acquiring ${versionInfo.resolvedVersion} - ${versionInfo.arch} from ${versionInfo.downloadUrl}`); + downloadPath = yield tc.downloadTool(versionInfo.downloadUrl, undefined, this.nodeInfo.auth); + if (downloadPath) { + toolPath = yield this.extractArchive(downloadPath, versionInfo, false); + } + } + else { + core.info('Not found in manifest. Falling back to download directly from Node'); + } + } + catch (err) { + // Rate limit? + if (err instanceof tc.HTTPError && + (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { + core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`); + } + else { + core.info(err.message); + } + core.debug((_a = err.stack) !== null && _a !== void 0 ? 
_a : 'empty stack'); + core.info('Falling back to download directly from Node'); + } + if (!toolPath) { + toolPath = yield this.downloadDirectlyFromNode(); + } + if (this.osPlat != 'win32') { + toolPath = path_1.default.join(toolPath, 'bin'); + } + core.addPath(toolPath); + }); + } + addToolPath(toolPath) { + if (this.osPlat != 'win32') { + toolPath = path_1.default.join(toolPath, 'bin'); + } + core.addPath(toolPath); + } + downloadDirectlyFromNode() { + return __awaiter(this, void 0, void 0, function* () { + const nodeJsVersions = yield this.getNodeJsVersions(); + const versions = this.filterVersions(nodeJsVersions); + const evaluatedVersion = this.evaluateVersions(versions); + if (!evaluatedVersion) { + throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); + } + const toolName = this.getNodejsDistInfo(evaluatedVersion); + try { + const toolPath = yield this.downloadNodejs(toolName); + return toolPath; + } + catch (error) { + if (error instanceof tc.HTTPError && error.httpStatusCode === 404) { + core.warning(`Node version ${this.nodeInfo.versionSpec} for platform ${this.osPlat} and architecture ${this.nodeInfo.arch} was found but failed to download. ` + + 'This usually happens when downloadable binaries are not fully updated at https://nodejs.org/. ' + + 'To resolve this issue you may either fall back to the older version or try again later.'); + } + throw error; + } + }); + } + evaluateVersions(versions) { + let version = ''; + if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { + core.info(`getting latest node version...`); + return versions[0]; + } + version = super.evaluateVersions(versions); + return version; + } + getDistributionUrl() { + return `https://nodejs.org/dist`; + } + getManifest() { + core.debug('Getting manifest from actions/node-versions@main'); + return tc.getManifestFromRepo('actions', 'node-versions', this.nodeInfo.auth, 'main'); + } + resolveLtsAliasFromManifest(versionSpec, stable, manifest) { + var _a; + const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (!alias) { + throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); + } + core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); + // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. + const n = Number(alias); + const aliases = Object.fromEntries(manifest + .filter(x => x.lts && x.stable === stable) + .map(x => [x.lts.toLowerCase(), x]) + .reverse()); + const numbered = Object.values(aliases); + const release = alias === '*' + ? numbered[numbered.length - 1] + : n < 0 + ? numbered[numbered.length - 1 + n] + : aliases[alias]; + if (!release) { + throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); + } + core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); + return release.version.split('.')[0]; + } + resolveVersionFromManifest(versionSpec, stable, osArch, manifest) { + return __awaiter(this, void 0, void 0, function* () { + try { + const info = yield this.getInfoFromManifest(versionSpec, stable, osArch, manifest); + return info === null || info === void 0 ? 
void 0 : info.resolvedVersion; + } + catch (err) { + core.info('Unable to resolve version from manifest...'); + core.debug(err.message); + } + }); + } + getInfoFromManifest(versionSpec, stable, osArch, manifest) { + return __awaiter(this, void 0, void 0, function* () { + let info = null; + if (!manifest) { + core.debug('No manifest cached'); + manifest = yield this.getManifest(); + } + const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); + if (rel && rel.files.length > 0) { + info = {}; + info.resolvedVersion = rel.version; + info.arch = rel.files[0].arch; + info.downloadUrl = rel.files[0].download_url; + info.fileName = rel.files[0].filename; + } + return info; + }); + } + isLtsAlias(versionSpec) { + return versionSpec.startsWith('lts/'); + } + isLatestSyntax(versionSpec) { + return ['current', 'latest', 'node'].includes(versionSpec); + } +} +exports["default"] = OfficialBuilds; + + +/***/ }), + +/***/ 98837: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); +class RcBuild extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + getDistributionUrl() { + return 'https://nodejs.org/download/rc'; + } +} +exports["default"] = RcBuild; + + +/***/ }), + +/***/ 50969: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(70957)); +class CanaryBuild extends base_distribution_prerelease_1.default { + constructor(nodeInfo) { + super(nodeInfo); + this.distribution = 'v8-canary'; + } + getDistributionUrl() { + return 'https://nodejs.org/download/v8-canary'; + } +} +exports["default"] = CanaryBuild; + + +/***/ }), + +/***/ 70399: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const os_1 = __importDefault(__nccwpck_require__(22037)); +const auth = __importStar(__nccwpck_require__(17573)); +const path = __importStar(__nccwpck_require__(71017)); +const cache_restore_1 = __nccwpck_require__(19517); +const cache_utils_1 = __nccwpck_require__(41678); +const installer_factory_1 = __nccwpck_require__(85617); +const util_1 = __nccwpck_require__(92629); +const constants_1 = __nccwpck_require__(69042); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + // + // Version is optional. If supplied, install / use from the tool cache + // If not supplied then task is still used to setup proxy, auth, etc... + // + const version = resolveVersionInput(); + let arch = core.getInput('architecture'); + const cache = core.getInput('cache'); + // if architecture supplied but node-version is not + // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. + if (arch && !version) { + core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); + } + if (!arch) { + arch = os_1.default.arch(); + } + if (version) { + const token = core.getInput('token'); + const auth = !token ? 
undefined : `token ${token}`; + const stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; + const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; + const nodejsInfo = { + versionSpec: version, + checkLatest, + auth, + stable, + arch + }; + const nodeDistribution = (0, installer_factory_1.getNodejsDistribution)(nodejsInfo); + yield nodeDistribution.setupNodeJs(); + } + yield (0, util_1.printEnvDetailsAndSetOutput)(); + const registryUrl = core.getInput('registry-url'); + const alwaysAuth = core.getInput('always-auth'); + if (registryUrl) { + auth.configAuthentication(registryUrl, alwaysAuth); + } + if (cache && (0, cache_utils_1.isCacheFeatureAvailable)()) { + core.saveState(constants_1.State.CachePackageManager, cache); + const cacheDependencyPath = core.getInput('cache-dependency-path'); + yield (0, cache_restore_1.restoreCache)(cache, cacheDependencyPath); + } + const matchersPath = path.join(__dirname, '../..', '.github'); + core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); + } + catch (err) { + core.setFailed(err.message); + } + }); +} +exports.run = run; +function resolveVersionInput() { + let version = core.getInput('node-version'); + const versionFileInput = core.getInput('node-version-file'); + if (version && versionFileInput) { + core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); + } + if (version) { + return version; + } + if (versionFileInput) { + const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); + const parsedVersion = (0, util_1.getNodeVersionFromFile)(versionFilePath); + if (parsedVersion) { + version = parsedVersion; + } + else { + core.warning(`Could not determine node version from ${versionFilePath}. Falling back`); + } + core.info(`Resolved ${versionFileInput} as ${version}`); + } + return version; +} + + +/***/ }), + +/***/ 92629: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unique = exports.printEnvDetailsAndSetOutput = exports.getNodeVersionFromFile = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const exec = __importStar(__nccwpck_require__(71514)); +const io = __importStar(__nccwpck_require__(47351)); +const fs_1 = __importDefault(__nccwpck_require__(57147)); +const path_1 = __importDefault(__nccwpck_require__(71017)); +function getNodeVersionFromFile(versionFilePath) { + var _a, _b, _c, _d, _e; + if (!fs_1.default.existsSync(versionFilePath)) { + throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); + } + const contents = fs_1.default.readFileSync(versionFilePath, 'utf8'); + // Try parsing the file as an NPM `package.json` file. + try { + const manifest = JSON.parse(contents); + // Presume package.json file. + if (typeof manifest === 'object' && !!manifest) { + // Support Volta. + // See https://docs.volta.sh/guide/understanding#managing-your-project + if ((_a = manifest.volta) === null || _a === void 0 ? void 0 : _a.node) { + return manifest.volta.node; + } + if ((_b = manifest.engines) === null || _b === void 0 ? void 0 : _b.node) { + return manifest.engines.node; + } + // Support Volta workspaces. + // See https://docs.volta.sh/advanced/workspaces + if ((_c = manifest.volta) === null || _c === void 0 ? void 0 : _c.extends) { + const extendedFilePath = path_1.default.resolve(path_1.default.dirname(versionFilePath), manifest.volta.extends); + core.info('Resolving node version from ' + extendedFilePath); + return getNodeVersionFromFile(extendedFilePath); + } + // If contents are an object, we parsed JSON + // this can happen if node-version-file is a package.json + // yet contains no volta.node or engines.node + // + // If node-version file is _not_ JSON, control flow + // will not have reached these lines. + // + // And because we've reached here, we know the contents + // *are* JSON, so no further string parsing makes sense. + return null; + } + } + catch (_f) { + core.info('Node version file is not JSON file'); + } + const found = contents.match(/^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m); + return (_e = (_d = found === null || found === void 0 ? void 0 : found.groups) === null || _d === void 0 ? void 0 : _d.version) !== null && _e !== void 0 ? _e : contents.trim(); +} +exports.getNodeVersionFromFile = getNodeVersionFromFile; +function printEnvDetailsAndSetOutput() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Environment details'); + const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { + const pathTool = yield io.which(tool, false); + const output = pathTool ?
yield getToolVersion(tool, ['--version']) : ''; + return { tool, output }; + })); + const tools = yield Promise.all(promises); + tools.forEach(({ tool, output }) => { + if (tool === 'node') { + core.setOutput(`${tool}-version`, output); + } + core.info(`${tool}: ${output}`); + }); + core.endGroup(); + }); +} +exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; +function getToolVersion(tool, options) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { + ignoreReturnCode: true, + silent: true + }); + if (exitCode > 0) { + core.info(`[warning]${stderr}`); + return ''; + } + return stdout.trim(); + } + catch (err) { + return ''; + } + }); +} +const unique = () => { + const encountered = new Set(); + return (value) => { + if (encountered.has(value)) + return false; + encountered.add(value); + return true; + }; +}; +exports.unique = unique; + + +/***/ }), + +/***/ 22877: +/***/ ((module) => { + +module.exports = eval("require")("encoding"); + + +/***/ }), + +/***/ 39491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 50852: +/***/ ((module) => { + +"use strict"; +module.exports = require("async_hooks"); + +/***/ }), + +/***/ 14300: +/***/ ((module) => { + +"use strict"; +module.exports = require("buffer"); + +/***/ }), + +/***/ 32081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 96206: +/***/ ((module) => { + +"use strict"; +module.exports = require("console"); + +/***/ }), + +/***/ 6113: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 67643: +/***/ ((module) => { + +"use strict"; +module.exports = require("diagnostics_channel"); + +/***/ }), + +/***/ 82361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 57147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 13685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 85158: +/***/ ((module) => { + +"use strict"; +module.exports = require("http2"); + +/***/ }), + +/***/ 95687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 41808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 15673: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 88849: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:http"); + +/***/ }), + +/***/ 22286: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:https"); + +/***/ }), + +/***/ 70612: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:os"); + +/***/ }), + +/***/ 97742: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:process"); + +/***/ }), + +/***/ 84492: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:stream"); + +/***/ }), + +/***/ 47261: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + +/***/ 65628: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:zlib"); + +/***/ }), + +/***/ 22037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 71017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 4074: +/***/ 
((module) => { + +"use strict"; +module.exports = require("perf_hooks"); + +/***/ }), + +/***/ 85477: +/***/ ((module) => { + +"use strict"; +module.exports = require("punycode"); + +/***/ }), + +/***/ 63477: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + +/***/ 12781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 35356: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream/web"); + +/***/ }), + +/***/ 71576: +/***/ ((module) => { + +"use strict"; +module.exports = require("string_decoder"); + +/***/ }), + +/***/ 39512: +/***/ ((module) => { + +"use strict"; +module.exports = require("timers"); + +/***/ }), + +/***/ 24404: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 76224: +/***/ ((module) => { + +"use strict"; +module.exports = require("tty"); + +/***/ }), + +/***/ 57310: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 73837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 29830: +/***/ ((module) => { + +"use strict"; +module.exports = require("util/types"); + +/***/ }), + +/***/ 71267: +/***/ ((module) => { + +"use strict"; +module.exports = require("worker_threads"); + +/***/ }), + +/***/ 59796: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }), + +/***/ 31875: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureKeyCredential = void 0; +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +exports.AzureKeyCredential = AzureKeyCredential; +//# sourceMappingURL=azureKeyCredential.js.map + +/***/ }), + +/***/ 51377: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureNamedKeyCredential = void 0; +exports.isNamedKeyCredential = isNamedKeyCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. 
+ * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map + +/***/ }), + +/***/ 27182: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureSASCredential = void 0; +exports.isSASCredential = isSASCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +exports.AzureSASCredential = AzureSASCredential; +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +function isSASCredential(credential) { + return ((0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map + +/***/ }), + +/***/ 98834: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0; +var azureKeyCredential_js_1 = __nccwpck_require__(31875); +Object.defineProperty(exports, "AzureKeyCredential", ({ enumerable: true, get: function () { return azureKeyCredential_js_1.AzureKeyCredential; } })); +var keyCredential_js_1 = __nccwpck_require__(59122); +Object.defineProperty(exports, "isKeyCredential", ({ enumerable: true, get: function () { return keyCredential_js_1.isKeyCredential; } })); +var azureNamedKeyCredential_js_1 = __nccwpck_require__(51377); +Object.defineProperty(exports, "AzureNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } })); +Object.defineProperty(exports, "isNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } })); +var azureSASCredential_js_1 = __nccwpck_require__(27182); +Object.defineProperty(exports, "AzureSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.AzureSASCredential; } })); +Object.defineProperty(exports, "isSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.isSASCredential; } })); +var tokenCredential_js_1 = __nccwpck_require__(39162); +Object.defineProperty(exports, "isTokenCredential", ({ enumerable: true, get: function () { return tokenCredential_js_1.isTokenCredential; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 59122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isKeyCredential = isKeyCredential; +const core_util_1 = __nccwpck_require__(80637); +/** + * Tests an object to determine whether it implements KeyCredential. + * + * @param credential - The assumed KeyCredential to be tested. + */ +function isKeyCredential(credential) { + return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; +} +//# sourceMappingURL=keyCredential.js.map + +/***/ }), + +/***/ 39162: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isBearerToken = isBearerToken; +exports.isPopToken = isPopToken; +exports.isTokenCredential = isTokenCredential; +/** + * @internal + * @param accessToken - Access token + * @returns Whether a token is bearer type or not + */ +function isBearerToken(accessToken) { + return !accessToken.tokenType || accessToken.tokenType === "Bearer"; +} +/** + * @internal + * @param accessToken - Access token + * @returns Whether a token is Pop token or not + */ +function isPopToken(accessToken) { + return accessToken.tokenType === "pop"; +} +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map + +/***/ }), + +/***/ 94873: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnClaimChallenge = exports.parseCAEChallenge = void 0; +const log_js_1 = __nccwpck_require__(53776); +const base64_js_1 = __nccwpck_require__(23442); +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. + * + * @internal + */ +function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +exports.parseCAEChallenge = parseCAEChallenge; +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). + * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
+ * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || log_js_1.logger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map + +/***/ }), + +/***/ 16576: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnTenantChallenge = void 0; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. 
+ **/ +const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. + */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +/** + * Extracts the options form a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map + +/***/ }), + +/***/ 23442: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decodeStringToString = exports.decodeString = exports.encodeByteArray = exports.encodeString = void 0; +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +exports.encodeString = encodeString; +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Aray to encode + * @internal + */ +function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +exports.encodeByteArray = encodeByteArray; +/** + * Decodes a base64 string into a byte array. + * @param value - the base64 string to decode + * @internal + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} +exports.decodeString = decodeString; +/** + * Decodes a base64 string into a string. + * @param value - the base64 string to decode + * @internal + */ +function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); +} +exports.decodeStringToString = decodeStringToString; +//# sourceMappingURL=base64.js.map + +/***/ }), + +/***/ 25315: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deserializationPolicy = exports.deserializationPolicyName = void 0; +const interfaces_js_1 = __nccwpck_require__(8153); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const serializer_js_1 = __nccwpck_require__(63566); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +exports.deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? 
_g : interfaces_js_1.XML_CHARKEY, + }, + }; + return { + name: exports.deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +exports.deserializationPolicy = deserializationPolicy; +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new core_rest_pipeline_1.RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. 
+ // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; + const e = new core_rest_pipeline_1.RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map + +/***/ }), + +/***/ 30308: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCachedDefaultHttpClient = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return cachedHttpClient; +} +exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient; +//# sourceMappingURL=httpClientCache.js.map + +/***/ }), + +/***/ 7611: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0; +var serializer_js_1 = __nccwpck_require__(63566); +Object.defineProperty(exports, "createSerializer", ({ enumerable: true, get: function () { return serializer_js_1.createSerializer; } })); +Object.defineProperty(exports, "MapperTypeNames", ({ enumerable: true, get: function () { return serializer_js_1.MapperTypeNames; } })); +var serviceClient_js_1 = __nccwpck_require__(28927); +Object.defineProperty(exports, "ServiceClient", ({ enumerable: true, get: function () { return serviceClient_js_1.ServiceClient; } })); +var pipeline_js_1 = __nccwpck_require__(33924); +Object.defineProperty(exports, "createClientPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createClientPipeline; } })); +var interfaces_js_1 = __nccwpck_require__(8153); +Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_ATTRKEY; } })); +Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_CHARKEY; } })); +var deserializationPolicy_js_1 = __nccwpck_require__(25315); +Object.defineProperty(exports, "deserializationPolicy", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicy; } })); +Object.defineProperty(exports, "deserializationPolicyName", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicyName; } })); +var serializationPolicy_js_1 = __nccwpck_require__(96625); +Object.defineProperty(exports, "serializationPolicy", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicy; } })); +Object.defineProperty(exports, "serializationPolicyName", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicyName; } })); +var authorizeRequestOnClaimChallenge_js_1 = __nccwpck_require__(94873); +Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } })); +var authorizeRequestOnTenantChallenge_js_1 = __nccwpck_require__(16576); +Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 41459: +/***/ ((__unused_webpack_module, 
exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getPathStringFromParameter = exports.getStreamingResponseStatusCodes = void 0; +const serializer_js_1 = __nccwpck_require__(63566); +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +exports.getPathStringFromParameter = getPathStringFromParameter; +//# sourceMappingURL=interfaceHelpers.js.map + +/***/ }), + +/***/ 8153: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map + +/***/ }), + +/***/ 53776: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +exports.logger = (0, logger_1.createClientLogger)("core-client"); +//# sourceMappingURL=log.js.map + +/***/ }), + +/***/ 62074: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOperationRequestInfo = exports.getOperationArgumentValueFromParameter = void 0; +const state_js_1 = __nccwpck_require__(35429); +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. 
+ */ +function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state_js_1.state.operationRequestMap.get(request); + if (!info) { + info = {}; + state_js_1.state.operationRequestMap.set(request, info); + } + return info; +} +exports.getOperationRequestInfo = getOperationRequestInfo; +//# sourceMappingURL=operationHelpers.js.map + +/***/ }), + +/***/ 33924: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createClientPipeline = void 0; +const deserializationPolicy_js_1 = __nccwpck_require__(25315); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const serializationPolicy_js_1 = __nccwpck_require__(96625); +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. + * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. 
+ */ +function createClientPipeline(options = {}) { + const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +exports.createClientPipeline = createClientPipeline; +//# sourceMappingURL=pipeline.js.map + +/***/ }), + +/***/ 96625: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.serializeRequestBody = exports.serializeHeaders = exports.serializationPolicy = exports.serializationPolicyName = void 0; +const interfaces_js_1 = __nccwpck_require__(8153); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const serializer_js_1 = __nccwpck_require__(63566); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +/** + * The programmatic identifier of the serializationPolicy. + */ +exports.serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. + */ +function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: exports.serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; +} +exports.serializationPolicy = serializationPolicy; +/** + * @internal + */ +function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); + } + } + } + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } + } +} +exports.serializeHeaders = serializeHeaders; +/** + * @internal + */ +function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? 
_d : interfaces_js_1.XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === serializer_js_1.MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === serializer_js_1.MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. 
+ return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); + } + } + } +} +exports.serializeRequestBody = serializeRequestBody; +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map + +/***/ }), + +/***/ 63566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MapperTypeNames = exports.createSerializer = void 0; +const tslib_1 = __nccwpck_require__(4351); +const base64 = tslib_1.__importStar(__nccwpck_require__(23442)); +const interfaces_js_1 = __nccwpck_require__(8153); +const utils_js_1 = __nccwpck_require__(25363); +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[interfaces_js_1.XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +/** + * Method that creates and returns a Serializer. + * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +exports.createSerializer = createSerializer; +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!(0, utils_js_1.isDuration)(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? 
`xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[interfaces_js_1.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; + parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? _a : interfaces_js_1.XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? 
_a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? _a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +exports.MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", +}; +//# sourceMappingURL=serializer.js.map + +/***/ }), + +/***/ 28927: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceClient = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const pipeline_js_1 = __nccwpck_require__(33924); +const utils_js_1 = __nccwpck_require__(25363); +const httpClientCache_js_1 = __nccwpck_require__(30308); +const operationHelpers_js_1 = __nccwpck_require__(62074); +const urlHelpers_js_1 = __nccwpck_require__(98258); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +const log_js_1 = __nccwpck_require__(53776); +/** + * Initializes a new instance of the ServiceClient. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. + */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. 
+ */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this); + const request = (0, core_rest_pipeline_1.createPipelineRequest)({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if (options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } + } +} +exports.ServiceClient = ServiceClient; +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? 
{ credentialScopes, credential: options.credential } + : undefined; + return (0, pipeline_js_1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map + +/***/ }), + +/***/ 35429: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.state = void 0; +/** + * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports. + */ +exports.state = { + operationRequestMap: new WeakMap(), +}; +//# sourceMappingURL=state-cjs.cjs.map + +/***/ }), + +/***/ 98258: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.appendQueryParams = exports.getRequestUrl = void 0; +const operationHelpers_js_1 = __nccwpck_require__(62074); +const interfaceHelpers_js_1 = __nccwpck_require__(41459); +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. + if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } + } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. 
+ */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +exports.getRequestUrl = getRequestUrl; +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); + const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } + } + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; + } + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; + } + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); + } + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; + } + } + else { + newPath = newPath + pathToAppend; + } + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. + if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); + } + } + } + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; + } + // remove the leading ? + queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } + } + return result; +} +/** @internal */ +function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; + } + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. 
+ // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } + } + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } + } + // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +exports.appendQueryParams = appendQueryParams; +//# sourceMappingURL=urlHelpers.js.map + +/***/ }), + +/***/ 25363: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.flattenResponse = exports.isValidUuid = exports.isDuration = exports.isPrimitiveBody = void 0; +/** + * A type guard for a primitive response body. + * @param value - Value to test + * + * @internal + */ +function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +exports.isPrimitiveBody = isPrimitiveBody; +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +function isDuration(value) { + return validateISODuration.test(value); +} +exports.isDuration = isDuration; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. + * + * @internal + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +exports.isValidUuid = isValidUuid; +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. 
+ * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +exports.flattenResponse = flattenResponse; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 35064: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
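// Editor's note: a minimal, self-contained sketch (not part of the bundled
// @azure/core-client source or of this diff) of the flattening idea used by
// the flattenResponse helper earlier in this bundle for HEAD requests: the
// parsed headers are spread onto the result and the body slot carries only a
// presence flag. The names below are illustrative.
function sketchFlattenHeadResponse(parsedHeaders, resourceExists) {
    // e.g. sketchFlattenHeadResponse({ etag: "abc" }, true)
    //      => { etag: "abc", body: true }
    return Object.assign({}, parsedHeaders, { body: resourceExists });
}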
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExtendedServiceClient = void 0; +const disableKeepAlivePolicy_js_1 = __nccwpck_require__(66899); +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const core_client_1 = __nccwpck_require__(7611); +const response_js_1 = __nccwpck_require__(49283); +/** + * Client to provide compatability between core V1 & V2. + */ +class ExtendedServiceClient extends core_client_1.ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { + this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: core_rest_pipeline_1.redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. + * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: (0, response_js_1.toCompatResponse)(lastResponse), + }); + } + return result; + } +} +exports.ExtendedServiceClient = ExtendedServiceClient; +//# sourceMappingURL=extendedClient.js.map + +/***/ }), + +/***/ 23256: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertHttpClient = void 0; +const response_js_1 = __nccwpck_require__(49283); +const util_js_1 = __nccwpck_require__(43732); +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. + * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request, { createProxy: true })); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; +} +exports.convertHttpClient = convertHttpClient; +//# sourceMappingURL=httpClientAdapter.js.map + +/***/ }), + +/***/ 25083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
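// Editor's note: an illustrative, self-contained sketch (an assumption, not
// the shipped @azure/core-http-compat code) of the onResponse-capture pattern
// that ExtendedServiceClient.sendOperationRequest uses earlier in this bundle:
// wrap any user-supplied onResponse callback so the raw response can also be
// captured locally before being forwarded.
function sketchWrapOnResponse(userProvidedCallBack, capture) {
    return function onResponse(rawResponse, flatResponse, error) {
        // remember the raw response for later (e.g. to expose it as `_response`)
        capture(rawResponse);
        if (userProvidedCallBack) {
            userProvidedCallBack(rawResponse, flatResponse, error);
        }
    };
}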
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0; +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. + * + * @packageDocumentation + */ +var extendedClient_js_1 = __nccwpck_require__(35064); +Object.defineProperty(exports, "ExtendedServiceClient", ({ enumerable: true, get: function () { return extendedClient_js_1.ExtendedServiceClient; } })); +var requestPolicyFactoryPolicy_js_1 = __nccwpck_require__(98241); +Object.defineProperty(exports, "requestPolicyFactoryPolicyName", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } })); +Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } })); +Object.defineProperty(exports, "HttpPipelineLogLevel", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } })); +var disableKeepAlivePolicy_js_1 = __nccwpck_require__(66899); +Object.defineProperty(exports, "disableKeepAlivePolicyName", ({ enumerable: true, get: function () { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } })); +var httpClientAdapter_js_1 = __nccwpck_require__(23256); +Object.defineProperty(exports, "convertHttpClient", ({ enumerable: true, get: function () { return httpClientAdapter_js_1.convertHttpClient; } })); +var util_js_1 = __nccwpck_require__(43732); +Object.defineProperty(exports, "toHttpHeadersLike", ({ enumerable: true, get: function () { return util_js_1.toHttpHeadersLike; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 66899: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pipelineContainsDisableKeepAlivePolicy = exports.createDisableKeepAlivePolicy = exports.disableKeepAlivePolicyName = void 0; +exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +function createDisableKeepAlivePolicy() { + return { + name: exports.disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; +/** + * @internal + */ +function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName); +} +exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; +//# sourceMappingURL=disableKeepAlivePolicy.js.map + +/***/ }), + +/***/ 98241: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
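// Editor's note: a hedged sketch (not part of the bundled source) of the
// pipeline-policy shape used by createDisableKeepAlivePolicy earlier in this
// bundle: a policy is just a named object whose sendRequest mutates the
// outgoing request and delegates to the next policy in the chain. The policy
// name below is illustrative.
function sketchMarkerPolicy() {
    return {
        name: "SketchMarkerPolicy",
        async sendRequest(request, next) {
            request.disableKeepAlive = true;
            return next(request);
        },
    };
}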
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0; +const util_js_1 = __nccwpck_require__(43732); +const response_js_1 = __nccwpck_require__(49283); +/** + * An enum for compatibility with RequestPolicy + */ +var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, +}; +/** + * The name of the RequestPolicyFactoryPolicy + */ +exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. + * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: exports.requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next((0, util_js_1.toPipelineRequest)(httpRequest)); + return (0, response_js_1.toCompatResponse)(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = (0, util_js_1.toWebResourceLike)(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; +} +exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map + +/***/ }), + +/***/ 49283: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPipelineResponse = exports.toCompatResponse = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +const util_js_1 = __nccwpck_require__(43732); +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. + */ +function toCompatResponse(response, options) { + let request = (0, util_js_1.toWebResourceLike)(response.request); + let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); + if (options === null || options === void 0 ? 
void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +exports.toCompatResponse = toCompatResponse; +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. + */ +function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1.toPipelineRequest)(compatResponse.request) }); + } +} +exports.toPipelineResponse = toPipelineResponse; +//# sourceMappingURL=response.js.map + +/***/ }), + +/***/ 43732: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpHeaders = exports.toHttpHeadersLike = exports.toWebResourceLike = exports.toPipelineRequest = void 0; +const core_rest_pipeline_1 = __nccwpck_require__(29146); +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. +const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. 
+const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +exports.toPipelineRequest = toPipelineRequest; +function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? _a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +exports.toWebResourceLike = toWebResourceLike; +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders from core-http. 
+ * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +exports.toHttpHeadersLike = toHttpHeadersLike; +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. 
+ */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +exports.HttpHeaders = HttpHeaders; +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 57759: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0; +const operation_js_1 = __nccwpck_require__(70281); +const logger_js_1 = __nccwpck_require__(28121); +function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; +} +function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(inputs) { + var _a; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return undefined; + } + case "PATCH": { + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; + } + default: { + return getDefault(); + } + } + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return undefined; + } + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; + } + } + } +} +function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? 
void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== undefined) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig, + }), + }; + } + else if (location !== undefined) { + return { + mode: "ResourceLocation", + operationLocation: location, + }; + } + else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath, + }; + } + else { + return undefined; + } +} +exports.inferLroMode = inferLroMode; +function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== undefined) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { + case undefined: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } +} +function getStatus(rawResponse) { + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); +} +function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } + else if (statusCode < 300) { + return "succeeded"; + } + else { + return "failed"; + } +} +function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) + ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) + : retryAfterInSeconds * 1000; + } + return undefined; +} +exports.parseRetryAfter = parseRetryAfter; +function getErrorFromResponse(response) { + const error = accessBodyProperty(response, "error"); + if (!error) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; + } + if (!error.code || !error.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; + } + return error; +} +exports.getErrorFromResponse = getErrorFromResponse; +function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return undefined; +} +function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case undefined: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; + } + } + const status = helper(); + return status === "running" && operationLocation === undefined ? "succeeded" : status; +} +exports.getStatusFromInitialResponse = getStatusFromInitialResponse; +/** + * Initiates the long-running operation. + */ +async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); + }, + stateProxy, + processResult: processResult + ? ({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult, + }); +} +exports.initHttpOperation = initHttpOperation; +function getOperationLocation({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return undefined; + } + } +} +exports.getOperationLocation = getOperationLocation; +function getOperationStatus({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } +} +exports.getOperationStatus = getOperationStatus; +function accessBodyProperty({ flatResponse, rawResponse }, prop) { + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; +} +function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; +} +exports.getResourceLocation = getResourceLocation; +function isOperationError(e) { + return e.name === "RestError"; +} +exports.isOperationError = isOperationError; +/** Polls the long-running operation. */ +async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult + ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) + : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult, + }); +} +exports.pollHttpOperation = pollHttpOperation; +//# sourceMappingURL=operation.js.map + +/***/ }), + +/***/ 78412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpPoller = void 0; +const operation_js_1 = __nccwpck_require__(57759); +const poller_js_1 = __nccwpck_require__(76713); +/** + * Creates a poller that can be used to poll a long-running operation. 
+ * @param lro - Description of the long-running operation + * @param options - options to configure the poller + * @returns an initialized poller + */ +async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful, + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig, + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); + }, + poll: lro.sendPollRequest, + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult + ? ({ flatResponse }, state) => processResult(flatResponse, state) + : ({ flatResponse }) => flatResponse, + }); +} +exports.createHttpPoller = createHttpPoller; +//# sourceMappingURL=poller.js.map + +/***/ }), + +/***/ 90334: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpPoller = void 0; +const tslib_1 = __nccwpck_require__(4351); +var poller_js_1 = __nccwpck_require__(78412); +Object.defineProperty(exports, "createHttpPoller", ({ enumerable: true, get: function () { return poller_js_1.createHttpPoller; } })); +/** + * This can be uncommented to expose the protocol-agnostic poller + */ +// export { +// BuildCreatePollerOptions, +// Operation, +// CreatePollerOptions, +// OperationConfig, +// RestorableOperationState, +// } from "./poller/models"; +// export { buildCreatePoller } from "./poller/poller"; +/** legacy */ +tslib_1.__exportStar(__nccwpck_require__(52260), exports); +tslib_1.__exportStar(__nccwpck_require__(17270), exports); +tslib_1.__exportStar(__nccwpck_require__(93586), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 52260: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LroEngine = void 0; +var lroEngine_js_1 = __nccwpck_require__(35780); +Object.defineProperty(exports, "LroEngine", ({ enumerable: true, get: function () { return lroEngine_js_1.LroEngine; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 35780: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
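// Editor's note: a hedged usage sketch (an assumption, not code from this
// bundle) for the createHttpPoller factory defined earlier. The `lro` argument
// supplies the initial request and the poll request; pollUntilDone resolves
// once the operation reaches a terminal state. sendInitialRequestImpl and
// sendPollRequestImpl are hypothetical helpers supplied by the caller, and
// createHttpPoller is passed in explicitly to keep the sketch self-contained.
async function sketchPollUntilDone(createHttpPoller, sendInitialRequestImpl, sendPollRequestImpl) {
    const poller = await createHttpPoller({
        sendInitialRequest: sendInitialRequestImpl,
        sendPollRequest: sendPollRequestImpl,
        requestMethod: "PUT",
        requestPath: "/example/resource",
    }, { intervalInMs: 2000 });
    return poller.pollUntilDone();
}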
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LroEngine = void 0; +const operation_js_1 = __nccwpck_require__(77954); +const constants_js_1 = __nccwpck_require__(53846); +const poller_js_1 = __nccwpck_require__(17270); +const operation_js_2 = __nccwpck_require__(70281); +/** + * The LRO Engine, a class that performs polling. + */ +class LroEngine extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; + const state = resumeFrom + ? (0, operation_js_2.deserializeState)(resumeFrom) + : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +exports.LroEngine = LroEngine; +//# sourceMappingURL=lroEngine.js.map + +/***/ }), + +/***/ 77954: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GenericPollOperation = void 0; +const operation_js_1 = __nccwpck_require__(57759); +const logger_js_1 = __nccwpck_require__(28121); +const createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => (state.isCancelled = true), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.isStarted = true), + setSucceeded: (state) => (state.isCompleted = true), + setFailed: () => { + /** empty body */ + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), +}); +class GenericPollOperation { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + var _a; + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), (await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === undefined) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + 
updateState: updateState + ? (state, { rawResponse }) => updateState(state, rawResponse) + : undefined, + isDone: isDone + ? ({ flatResponse }, state) => isDone(flatResponse, state) + : undefined, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult, + }); + } + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); + return this; + } + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +exports.GenericPollOperation = GenericPollOperation; +//# sourceMappingURL=operation.js.map + +/***/ }), + +/***/ 93586: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=pollOperation.js.map + +/***/ }), + +/***/ 17270: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0; +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +exports.PollerStoppedError = PollerStoppedError; +/** + * When the operation is cancelled, the poller will be rejected with an instance + * of the PollerCancelledError. + */ +class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +exports.PollerCancelledError = PollerCancelledError; +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. 
+ * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. 
+ */ + constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. */ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error = new PollerCancelledError("Operation was canceled"); + this.reject(error); + throw error; + } + } + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. 
To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + // This is needed because the state could have been updated by + // `cancelOperation`, e.g. the operation is canceled or an error occurred. + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... 
+ * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString() { + return this.operation.toString(); + } +} +exports.Poller = Poller; +//# sourceMappingURL=poller.js.map + +/***/ }), + +/***/ 28121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +exports.logger = (0, logger_1.createClientLogger)("core-lro"); +//# sourceMappingURL=logger.js.map + +/***/ }), + +/***/ 53846: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0; +/** + * The default time interval to wait before sending the next polling request. + */ +exports.POLL_INTERVAL_IN_MS = 2000; +/** + * The closed set of terminal states. + */ +exports.terminalStates = ["succeeded", "canceled", "failed"]; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 70281: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
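// Editor's note: a small self-contained sketch (not part of the bundled
// source) of how the terminalStates constant defined just above is typically
// consumed: polling continues only while the reported status is non-terminal.
function sketchIsTerminal(status) {
    const terminalStates = ["succeeded", "canceled", "failed"];
    return terminalStates.includes(status);
}
// e.g. sketchIsTerminal("running") === false, sketchIsTerminal("succeeded") === true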
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pollOperation = exports.initOperation = exports.deserializeState = void 0; +const logger_js_1 = __nccwpck_require__(28121); +const constants_js_1 = __nccwpck_require__(53846); +/** + * Deserializes the state + */ +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); + } +} +exports.deserializeState = deserializeState; +function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error) => { + if (isOperationError(error)) { + stateProxy.setError(state, error); + stateProxy.setFailed(state); + } + throw error; + }; +} +function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; + } + return message + " " + innerMessage; +} +function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); + } + return { + code, + message, + }; +} +function processOperationStatus(result) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } + } + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || + (isDone === undefined && + ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult, + })); + } +} +function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; +} +/** + * Initiates the long-running operation. + */ +async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation, + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; +} +exports.initOperation = initOperation; +async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError, + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== undefined) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status, + }; + } + } + return { response, status }; +} +/** Polls the long-running operation. */ +async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== undefined) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options, + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult, + }); + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); + if (location !== undefined) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } + else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + } + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); + } +} +exports.pollOperation = pollOperation; +//# sourceMappingURL=operation.js.map + +/***/ }), + +/***/ 76713: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
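The `deserializeState` helper above is the inverse of the poller's `toString()` shown in the next module: `toString()` serializes `{ state }` as JSON, and `deserializeState` parses that string and returns its `state` property, which is how `restoreFrom` lets a new poller resume an in-flight operation. A small sketch of the round trip (the state literal below is illustrative only):

```ts
// Round trip between the poller's toString() and deserializeState().
// The state object is illustrative; real state is produced by
// initOperation() from the service's initial response.
const state = {
  status: "running",
  config: { operationLocation: "https://example.invalid/operations/123" },
};

const serialized = JSON.stringify({ state }); // what poller.toString() returns
const restored = JSON.parse(serialized).state; // what deserializeState() returns

console.log(restored.config.operationLocation); // "https://example.invalid/operations/123"
```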
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildCreatePoller = void 0; +const operation_js_1 = __nccwpck_require__(70281); +const constants_js_1 = __nccwpck_require__(53846); +const core_util_1 = __nccwpck_require__(80637); +const createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. + */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => (state.status = "canceled"), + setError: (state, error) => (state.error = error), + setResult: (state, result) => (state.result = result), + setRunning: (state) => (state.status = "running"), + setSucceeded: (state) => (state.status = "succeeded"), + setFailed: (state) => (state.status = "failed"), + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded", +}); +/** + * Returns a poller factory. + */ +function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback + ? (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() + : undefined; + const state = restoreFrom + ? (0, operation_js_1.deserializeState)(restoreFrom) + : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful, + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === undefined, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state, + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + // In the future we can use AbortSignal.any() instead + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } + else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } + finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } + else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = undefined; + }))), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } + else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful, + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + }, + }; + return poller; + }; +} +exports.buildCreatePoller = buildCreatePoller; +//# sourceMappingURL=poller.js.map + +/***/ }), + +/***/ 43171: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; +exports.SDK_VERSION = "1.18.0"; +exports.DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 81060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
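The abort wiring inside `pollUntilDone` above forwards a caller-supplied `AbortSignal` onto the poller's internal `AbortController` by hand, since `AbortSignal.any()` is not assumed to be available. A condensed sketch of that pattern in isolation (the function name is hypothetical; the bundle inlines this logic):

```ts
// Condensed sketch of the manual signal forwarding used in pollUntilDone.
function linkAbortSignal(inputSignal?: AbortSignal): AbortController {
  const controller = new AbortController();
  if (inputSignal?.aborted) {
    // Caller already aborted: reflect that immediately.
    controller.abort();
  } else {
    // Propagate a later abort exactly once.
    inputSignal?.addEventListener("abort", () => controller.abort(), { once: true });
  }
  return controller;
}
```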
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createPipelineFromOptions = createPipelineFromOptions; +const logPolicy_js_1 = __nccwpck_require__(46821); +const pipeline_js_1 = __nccwpck_require__(83906); +const redirectPolicy_js_1 = __nccwpck_require__(98526); +const userAgentPolicy_js_1 = __nccwpck_require__(88935); +const multipartPolicy_js_1 = __nccwpck_require__(19042); +const decompressResponsePolicy_js_1 = __nccwpck_require__(57618); +const defaultRetryPolicy_js_1 = __nccwpck_require__(48549); +const formDataPolicy_js_1 = __nccwpck_require__(16501); +const core_util_1 = __nccwpck_require__(80637); +const proxyPolicy_js_1 = __nccwpck_require__(94761); +const setClientRequestIdPolicy_js_1 = __nccwpck_require__(93860); +const tlsPolicy_js_1 = __nccwpck_require__(88446); +const tracingPolicy_js_1 = __nccwpck_require__(80606); +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. + */ +function createPipelineFromOptions(options) { + var _a; + const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); + if (core_util_1.isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); + } + pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); + pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); + } + pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); + pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); + pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). + pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); + pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)(Object.assign(Object.assign({}, options.userAgentOptions), options.loggingOptions)), { + afterPhase: "Retry", + }); + if (core_util_1.isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +//# sourceMappingURL=createPipelineFromOptions.js.map + +/***/ }), + +/***/ 88609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultHttpClient = createDefaultHttpClient; +const nodeHttpClient_js_1 = __nccwpck_require__(49463); +/** + * Create the correct HttpClient for the current environment. 
+ */ +function createDefaultHttpClient() { + return (0, nodeHttpClient_js_1.createNodeHttpClient)(); +} +//# sourceMappingURL=defaultHttpClient.js.map + +/***/ }), + +/***/ 60118: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHttpHeaders = createHttpHeaders; +function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +//# sourceMappingURL=httpHeaders.js.map + +/***/ }), + +/***/ 29146: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
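`createHttpHeaders` above stores each header under its lower-cased name while remembering the original casing, so lookups are case-insensitive and `toJSON({ preserveCase: true })` can still reproduce the names as they were set. For example:

```ts
// Behaviour of the HttpHeadersImpl shown above.
import { createHttpHeaders } from "@azure/core-rest-pipeline";

const headers = createHttpHeaders({ "Content-Type": "application/json" });

headers.get("content-type"); // "application/json" — lookup is case-insensitive
headers.has("CONTENT-TYPE"); // true

headers.set("x-ms-client-request-id", "  abc  ");
headers.get("X-MS-Client-Request-Id"); // "abc" — values are stringified and trimmed

headers.toJSON(); // { "content-type": "...", "x-ms-client-request-id": "abc" }
headers.toJSON({ preserveCase: true }); // { "Content-Type": "...", "x-ms-client-request-id": "abc" }
```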
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createFileFromStream = exports.createFile = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0; +var pipeline_js_1 = __nccwpck_require__(83906); +Object.defineProperty(exports, "createEmptyPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createEmptyPipeline; } })); +var createPipelineFromOptions_js_1 = __nccwpck_require__(81060); +Object.defineProperty(exports, "createPipelineFromOptions", ({ enumerable: true, get: function () { return createPipelineFromOptions_js_1.createPipelineFromOptions; } })); +var defaultHttpClient_js_1 = __nccwpck_require__(88609); +Object.defineProperty(exports, "createDefaultHttpClient", ({ enumerable: true, get: function () { return defaultHttpClient_js_1.createDefaultHttpClient; } })); +var httpHeaders_js_1 = __nccwpck_require__(60118); +Object.defineProperty(exports, "createHttpHeaders", ({ enumerable: true, get: function () { return httpHeaders_js_1.createHttpHeaders; } })); +var pipelineRequest_js_1 = __nccwpck_require__(93536); +Object.defineProperty(exports, "createPipelineRequest", ({ enumerable: true, get: function () { return pipelineRequest_js_1.createPipelineRequest; } })); +var restError_js_1 = __nccwpck_require__(61036); +Object.defineProperty(exports, "RestError", ({ enumerable: true, get: function () { return restError_js_1.RestError; } })); +Object.defineProperty(exports, "isRestError", ({ enumerable: true, get: function () { return restError_js_1.isRestError; } })); +var decompressResponsePolicy_js_1 = __nccwpck_require__(57618); +Object.defineProperty(exports, "decompressResponsePolicy", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicy; } })); +Object.defineProperty(exports, "decompressResponsePolicyName", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } })); +var exponentialRetryPolicy_js_1 = __nccwpck_require__(1598); +Object.defineProperty(exports, "exponentialRetryPolicy", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } })); +Object.defineProperty(exports, "exponentialRetryPolicyName", ({ enumerable: true, get: function () { return 
exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } })); +var setClientRequestIdPolicy_js_1 = __nccwpck_require__(93860); +Object.defineProperty(exports, "setClientRequestIdPolicy", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } })); +Object.defineProperty(exports, "setClientRequestIdPolicyName", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } })); +var logPolicy_js_1 = __nccwpck_require__(46821); +Object.defineProperty(exports, "logPolicy", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicy; } })); +Object.defineProperty(exports, "logPolicyName", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicyName; } })); +var multipartPolicy_js_1 = __nccwpck_require__(19042); +Object.defineProperty(exports, "multipartPolicy", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicy; } })); +Object.defineProperty(exports, "multipartPolicyName", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicyName; } })); +var proxyPolicy_js_1 = __nccwpck_require__(94761); +Object.defineProperty(exports, "proxyPolicy", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicy; } })); +Object.defineProperty(exports, "proxyPolicyName", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicyName; } })); +Object.defineProperty(exports, "getDefaultProxySettings", ({ enumerable: true, get: function () { return proxyPolicy_js_1.getDefaultProxySettings; } })); +var redirectPolicy_js_1 = __nccwpck_require__(98526); +Object.defineProperty(exports, "redirectPolicy", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicy; } })); +Object.defineProperty(exports, "redirectPolicyName", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicyName; } })); +var systemErrorRetryPolicy_js_1 = __nccwpck_require__(72470); +Object.defineProperty(exports, "systemErrorRetryPolicy", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } })); +Object.defineProperty(exports, "systemErrorRetryPolicyName", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } })); +var throttlingRetryPolicy_js_1 = __nccwpck_require__(54802); +Object.defineProperty(exports, "throttlingRetryPolicy", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } })); +Object.defineProperty(exports, "throttlingRetryPolicyName", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } })); +var retryPolicy_js_1 = __nccwpck_require__(39700); +Object.defineProperty(exports, "retryPolicy", ({ enumerable: true, get: function () { return retryPolicy_js_1.retryPolicy; } })); +var tracingPolicy_js_1 = __nccwpck_require__(80606); +Object.defineProperty(exports, "tracingPolicy", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicy; } })); +Object.defineProperty(exports, "tracingPolicyName", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicyName; } })); +var defaultRetryPolicy_js_1 = __nccwpck_require__(48549); +Object.defineProperty(exports, "defaultRetryPolicy", ({ enumerable: true, get: function () { return defaultRetryPolicy_js_1.defaultRetryPolicy; } })); +var userAgentPolicy_js_1 = __nccwpck_require__(88935); 
+Object.defineProperty(exports, "userAgentPolicy", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicy; } })); +Object.defineProperty(exports, "userAgentPolicyName", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicyName; } })); +var tlsPolicy_js_1 = __nccwpck_require__(88446); +Object.defineProperty(exports, "tlsPolicy", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicy; } })); +Object.defineProperty(exports, "tlsPolicyName", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicyName; } })); +var formDataPolicy_js_1 = __nccwpck_require__(16501); +Object.defineProperty(exports, "formDataPolicy", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicy; } })); +Object.defineProperty(exports, "formDataPolicyName", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicyName; } })); +var bearerTokenAuthenticationPolicy_js_1 = __nccwpck_require__(11319); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } })); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } })); +var ndJsonPolicy_js_1 = __nccwpck_require__(82032); +Object.defineProperty(exports, "ndJsonPolicy", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicy; } })); +Object.defineProperty(exports, "ndJsonPolicyName", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicyName; } })); +var auxiliaryAuthenticationHeaderPolicy_js_1 = __nccwpck_require__(68152); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } })); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } })); +var file_js_1 = __nccwpck_require__(3224); +Object.defineProperty(exports, "createFile", ({ enumerable: true, get: function () { return file_js_1.createFile; } })); +Object.defineProperty(exports, "createFileFromStream", ({ enumerable: true, get: function () { return file_js_1.createFileFromStream; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 30648: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +const logger_1 = __nccwpck_require__(89497); +exports.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); +//# sourceMappingURL=log.js.map + +/***/ }), + +/***/ 49463: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
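The export list above is the package's public surface (published as `@azure/core-rest-pipeline`), and the factories shown earlier in this bundle compose directly: build a pipeline from options, create a request, and hand both to an HTTP client. A small usage sketch against a placeholder URL:

```ts
import {
  createDefaultHttpClient,
  createPipelineFromOptions,
  createPipelineRequest,
} from "@azure/core-rest-pipeline";

async function main(): Promise<void> {
  // Default policy set: retry, redirect (Node only), logging, user agent, etc.
  const pipeline = createPipelineFromOptions({});
  const httpClient = createDefaultHttpClient();

  const request = createPipelineRequest({ url: "https://example.invalid/status" });
  const response = await pipeline.sendRequest(httpClient, request);
  console.log(response.status, response.bodyAsText);
}

main().catch(console.error);
```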
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getBodyLength = getBodyLength; +exports.createNodeHttpClient = createNodeHttpClient; +const tslib_1 = __nccwpck_require__(4351); +const http = tslib_1.__importStar(__nccwpck_require__(88849)); +const https = tslib_1.__importStar(__nccwpck_require__(22286)); +const zlib = tslib_1.__importStar(__nccwpck_require__(65628)); +const node_stream_1 = __nccwpck_require__(84492); +const abort_controller_1 = __nccwpck_require__(11514); +const httpHeaders_js_1 = __nccwpck_require__(60118); +const restError_js_1 = __nccwpck_require__(61036); +const log_js_1 = __nccwpck_require__(30648); +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + if (stream.readable === false) { + return Promise.resolve(); + } + return new Promise((resolve) => { + const handler = () => { + resolve(); + stream.removeListener("close", handler); + stream.removeListener("end", handler); + stream.removeListener("error", handler); + }; + stream.on("close", handler); + stream.on("end", handler); + stream.on("error", handler); + }); +} +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends node_stream_1.Transform { + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new abort_controller_1.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? 
request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. + if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? 
http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new restError_js_1.RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : restError_js_1.RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + log_js_1.logger.error("Unrecognized body type", body); + reject(new restError_js_1.RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. + keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } +} +function getResponseHeaders(res) { + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; +} +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; +} +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? 
void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { + code: restError_js_1.RestError.PARSE_ERROR, + })); + } + }); + }); +} +/** @internal */ +function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } +} +/** + * Create a new HttpClient instance for the NodeJS environment. + * @internal + */ +function createNodeHttpClient() { + return new NodeHttpClient(); +} +//# sourceMappingURL=nodeHttpClient.js.map + +/***/ }), + +/***/ 83906: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createEmptyPipeline = createEmptyPipeline; +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? _a : []; + this._orderedPolicies = undefined; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; + } + clone() { + return new HttpPipeline(this._policies); + } + static create() { + return new HttpPipeline(); + } + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. 
+ * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. + for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. 
+ beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. + continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. + */ +function createEmptyPipeline() { + return HttpPipeline.create(); +} +//# sourceMappingURL=pipeline.js.map + +/***/ }), + +/***/ 93536: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createPipelineRequest = createPipelineRequest; +const httpHeaders_js_1 = __nccwpck_require__(60118); +const core_util_1 = __nccwpck_require__(80637); +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : (0, httpHeaders_js_1.createHttpHeaders)(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? 
_e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || (0, core_util_1.randomUUID)(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. + * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +//# sourceMappingURL=pipelineRequest.js.map -module.exports.mixin = function mixin(target, source) { - const keys = Object.getOwnPropertyNames(source); - for (let i = 0; i < keys.length; ++i) { - Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); - } -}; - -module.exports.wrapperSymbol = Symbol("wrapper"); -module.exports.implSymbol = Symbol("impl"); - -module.exports.wrapperForImpl = function (impl) { - return impl[module.exports.wrapperSymbol]; -}; +/***/ }), -module.exports.implForWrapper = function (wrapper) { - return wrapper[module.exports.implSymbol]; -}; +/***/ 68152: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.auxiliaryAuthenticationHeaderPolicyName = void 0; +exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; +const tokenCycler_js_1 = __nccwpck_require__(50601); +const log_js_1 = __nccwpck_require__(30648); +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || log_js_1.logger; + const tokenCyclerMap = new WeakMap(); + return { + name: exports.auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map /***/ }), -/***/ 62940: -/***/ ((module) => { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) +/***/ 11319: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return wrapper +"use strict"; - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bearerTokenAuthenticationPolicyName = void 0; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.parseChallenges = parseChallenges; +const tokenCycler_js_1 = __nccwpck_require__(50601); +const log_js_1 = __nccwpck_require__(30648); +const restError_js_1 = __nccwpck_require__(61036); +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Try to send the given request. + * + * When a response is received, returns a tuple of the response received and, if the response was received + * inside a thrown RestError, the RestError that was thrown. 
+ * + * Otherwise, if an error was thrown while sending the request that did not provide an underlying response, it + * will be rethrown. + */ +async function trySendRequest(request, next) { + try { + return [await next(request), undefined]; } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) + catch (e) { + if ((0, restError_js_1.isRestError)(e) && e.response) { + return [e.response, e]; + } + else { + throw e; + } } - return ret - } } - - -/***/ }), - -/***/ 15185: -/***/ ((module) => { - -class Node { - /// value; - /// next; - - constructor(value) { - this.value = value; - - // TODO: Remove this when targeting Node.js 12. - this.next = undefined; - } +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + // Enable CAE true by default + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + enableCae: true, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } } +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function isChallengeResponse(response) { + return response.status === 401 && response.headers.has("WWW-Authenticate"); +} +/** + * Re-authorize the request for CAE challenge. + * The response containing the challenge is `options.response`. + * If this method returns true, the underlying request will be sent once again. + */ +async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { + var _a; + const { scopes } = onChallengeOptions; + const accessToken = await onChallengeOptions.getAccessToken(scopes, { + enableCae: true, + claims: caeClaims, + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `${(_a = accessToken.tokenType) !== null && _a !== void 0 ? _a : "Bearer"} ${accessToken.token}`); + return true; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || log_js_1.logger; + const callbacks = { + authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, + authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge, + }; + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? 
(0, tokenCycler_js_1.createTokenCycler)(credential /* , options */) + : () => Promise.resolve(null); + return { + name: exports.bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. + * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + let shouldSendRequest; + [response, error] = await trySendRequest(request, next); + if (isChallengeResponse(response)) { + let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + // Handle CAE by default when receive CAE claim + if (claims) { + let parsedClaim; + // Return the response immediately if claims is not a valid base64 encoded string + try { + parsedClaim = atob(claims); + } + catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + response, + request, + getAccessToken, + logger, + }, parsedClaim); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + } + else if (callbacks.authorizeRequestOnChallenge) { + // Handle custom challenges when client provides custom callback + shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + // If we get another CAE Claim, we will handle it by default and return whatever value we receive for this + if (isChallengeResponse(response)) { + claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); + if (claims) { + let parsedClaim; + try { + parsedClaim = atob(claims); + } + catch (e) { + logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); + return response; + } + shouldSendRequest = await authorizeRequestOnCaeChallenge({ + scopes: Array.isArray(scopes) ? 
scopes : [scopes], + response, + request, + getAccessToken, + logger, + }, parsedClaim); + // Send updated request and handle response for RestError + if (shouldSendRequest) { + [response, error] = await trySendRequest(request, next); + } + } + } + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +/** + * Converts: `Bearer a="b", c="d", Pop e="f", g="h"`. + * Into: `[ { scheme: 'Bearer', params: { a: 'b', c: 'd' } }, { scheme: 'Pop', params: { e: 'f', g: 'h' } } ]`. + * + * @internal + */ +function parseChallenges(challenges) { + // Challenge regex seperates the string to individual challenges with different schemes in the format `Scheme a="b", c=d` + // The challenge regex captures parameteres with either quotes values or unquoted values + const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; + // Parameter regex captures the claims group removed from the scheme in the format `a="b"` and `c="d"` + // CAE challenge always have quoted parameters. For more reference, https://learn.microsoft.com/entra/identity-platform/claims-challenge + const paramRegex = /(\w+)="([^"]*)"/g; + const parsedChallenges = []; + let match; + // Iterate over each challenge match + while ((match = challengeRegex.exec(challenges)) !== null) { + const scheme = match[1]; + const paramsString = match[2]; + const params = {}; + let paramMatch; + // Iterate over each parameter match + while ((paramMatch = paramRegex.exec(paramsString)) !== null) { + params[paramMatch[1]] = paramMatch[2]; + } + parsedChallenges.push({ scheme, params }); + } + return parsedChallenges; +} +/** + * Parse a pipeline response and look for a CAE challenge with "Bearer" scheme + * Return the value in the header without parsing the challenge + * @internal + */ +function getCaeChallengeClaims(challenges) { + var _a; + if (!challenges) { + return; + } + // Find all challenges present in the header + const parsedChallenges = parseChallenges(challenges); + return (_a = parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")) === null || _a === void 0 ? void 0 : _a.params.claims; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map -class Queue { - // TODO: Use private class fields when targeting Node.js 12. - // #_head; - // #_tail; - // #_size; - - constructor() { - this.clear(); - } - - enqueue(value) { - const node = new Node(value); - - if (this._head) { - this._tail.next = node; - this._tail = node; - } else { - this._head = node; - this._tail = node; - } - - this._size++; - } - - dequeue() { - const current = this._head; - if (!current) { - return; - } - - this._head = this._head.next; - this._size--; - return current.value; - } - - clear() { - this._head = undefined; - this._tail = undefined; - this._size = 0; - } +/***/ }), - get size() { - return this._size; - } +/***/ 57618: +/***/ ((__unused_webpack_module, exports) => { - * [Symbol.iterator]() { - let current = this._head; +"use strict"; - while (current) { - yield current.value; - current = current.next; - } - } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decompressResponsePolicyName = void 0; +exports.decompressResponsePolicy = decompressResponsePolicy; +/** + * The programmatic identifier of the decompressResponsePolicy. 
+ */ +exports.decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +function decompressResponsePolicy() { + return { + name: exports.decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, + }; } - -module.exports = Queue; - +//# sourceMappingURL=decompressResponsePolicy.js.map /***/ }), -/***/ 17573: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 48549: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.configAuthentication = void 0; -const fs = __importStar(__nccwpck_require__(57147)); -const os = __importStar(__nccwpck_require__(22037)); -const path = __importStar(__nccwpck_require__(71017)); -const core = __importStar(__nccwpck_require__(42186)); -const github = __importStar(__nccwpck_require__(95438)); -function configAuthentication(registryUrl, alwaysAuth) { - const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); - if (!registryUrl.endsWith('/')) { - registryUrl += '/'; - } - writeRegistryToFile(registryUrl, npmrc, alwaysAuth); -} -exports.configAuthentication = configAuthentication; -function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { - let scope = core.getInput('scope'); - if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { - scope = github.context.repo.owner; - } - if (scope && scope[0] != '@') { - scope = '@' + scope; - } - if (scope) { - scope = scope.toLowerCase() + ':'; - } - core.debug(`Setting auth in ${fileLocation}`); - let newContents = ''; - if (fs.existsSync(fileLocation)) { - const curContents = fs.readFileSync(fileLocation, 'utf8'); - curContents.split(os.EOL).forEach((line) => { - // Add current contents unless they are setting the registry - if (!line.toLowerCase().startsWith(`${scope}registry`)) { - newContents += line + os.EOL; - } - }); - } - // Remove http: or https: from front of registry. 
- const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; - const registryString = `${scope}registry=${registryUrl}`; - const alwaysAuthString = `always-auth=${alwaysAuth}`; - newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; - fs.writeFileSync(fileLocation, newContents); - core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); - // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it - core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); +exports.defaultRetryPolicyName = void 0; +exports.defaultRetryPolicy = defaultRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link defaultRetryPolicy} + */ +exports.defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + */ +function defaultRetryPolicy(options = {}) { + var _a; + return { + name: exports.defaultRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; } - +//# sourceMappingURL=defaultRetryPolicy.js.map /***/ }), -/***/ 19517: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 1598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.restoreCache = void 0; -const cache = __importStar(__nccwpck_require__(27799)); -const core = __importStar(__nccwpck_require__(42186)); -const glob = __importStar(__nccwpck_require__(28090)); -const path_1 = __importDefault(__nccwpck_require__(71017)); -const fs_1 = __importDefault(__nccwpck_require__(57147)); -const constants_1 = __nccwpck_require__(69042); -const cache_utils_1 = __nccwpck_require__(41678); -const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const packageManagerInfo = yield (0, cache_utils_1.getPackageManagerInfo)(packageManager); - if (!packageManagerInfo) { - throw new Error(`Caching for '${packageManager}' is not supported`); - } - const platform = process.env.RUNNER_OS; - const cachePaths = yield (0, cache_utils_1.getCacheDirectories)(packageManagerInfo, cacheDependencyPath); - core.saveState(constants_1.State.CachePaths, cachePaths); - const lockFilePath = cacheDependencyPath - ? cacheDependencyPath - : findLockFile(packageManagerInfo); - const fileHash = yield glob.hashFiles(lockFilePath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - const keyPrefix = `node-cache-${platform}-${packageManager}`; - const primaryKey = `${keyPrefix}-${fileHash}`; - core.debug(`primary key is ${primaryKey}`); - core.saveState(constants_1.State.CachePrimaryKey, primaryKey); - const isManagedByYarnBerry = yield (0, cache_utils_1.repoHasYarnBerryManagedDependencies)(packageManagerInfo, cacheDependencyPath); - let cacheKey; - if (isManagedByYarnBerry) { - core.info('All dependencies are managed locally by yarn3, the previous cache can be used'); - cacheKey = yield cache.restoreCache(cachePaths, primaryKey, [keyPrefix]); - } - else { - cacheKey = yield cache.restoreCache(cachePaths, primaryKey); - } - core.setOutput('cache-hit', Boolean(cacheKey)); - if (!cacheKey) { - core.info(`${packageManager} cache is not found`); - return; - } - core.saveState(constants_1.State.CacheMatchedKey, cacheKey); - core.info(`Cache restored from key: ${cacheKey}`); -}); -exports.restoreCache = restoreCache; -const findLockFile = (packageManager) => { - const lockFiles = packageManager.lockFilePatterns; - const workspace = process.env.GITHUB_WORKSPACE; - const rootContent = fs_1.default.readdirSync(workspace); - const lockFile = lockFiles.find(item => rootContent.includes(item)); - if (!lockFile) { - throw new Error(`Dependencies lock file is not found in ${workspace}. 
Supported file patterns: ${lockFiles.toString()}`); - } - return path_1.default.join(workspace, lockFile); -}; - +exports.exponentialRetryPolicyName = void 0; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +exports.exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +function exponentialRetryPolicy(options = {}) { + var _a; + return (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }); +} +//# sourceMappingURL=exponentialRetryPolicy.js.map /***/ }), -/***/ 41678: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 16501: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; -const core = __importStar(__nccwpck_require__(42186)); -const exec = __importStar(__nccwpck_require__(71514)); -const cache = __importStar(__nccwpck_require__(27799)); -const glob = __importStar(__nccwpck_require__(28090)); -const path_1 = __importDefault(__nccwpck_require__(71017)); -const fs_1 = __importDefault(__nccwpck_require__(57147)); -const util_1 = __nccwpck_require__(92629); -exports.supportedPackageManagers = { - npm: { - name: 'npm', - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('npm config get cache', 'Could not get npm cache folder path') - }, - pnpm: { - name: 'pnpm', - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderPath: () => (0, exports.getCommandOutputNotEmpty)('pnpm store path --silent', 'Could not get pnpm cache folder path') - }, - yarn: { - name: 'yarn', - lockFilePatterns: ['yarn.lock'], - getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { - const yarnVersion = yield (0, exports.getCommandOutputNotEmpty)(`yarn --version`, 'Could not retrieve version of yarn', projectDir); - core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); - const stdOut = yarnVersion.startsWith('1.') - ? yield (0, exports.getCommandOutput)('yarn cache dir', projectDir) - : yield (0, exports.getCommandOutput)('yarn config get cacheFolder', projectDir); - if (!stdOut) { - throw new Error(`Could not get yarn cache folder path for ${projectDir}`); - } - return stdOut; - }) - } -}; -const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); - if (exitCode) { - stderr = !stderr.trim() - ? `The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); +exports.formDataPolicyName = void 0; +exports.formDataPolicy = formDataPolicy; +const core_util_1 = __nccwpck_require__(80637); +const httpHeaders_js_1 = __nccwpck_require__(60118); +/** + * The programmatic identifier of the formDataPolicy. + */ +exports.formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); } - return stdout.trim(); -}); -exports.getCommandOutput = getCommandOutput; -const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = (0, exports.getCommandOutput)(toolCommand, cwd); - if (!stdOut) { - throw new Error(error); + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. 
+ */ +function formDataPolicy() { + return { + name: exports.formDataPolicyName, + async sendRequest(request, next) { + if (core_util_1.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } } - return stdOut; -}); -exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; -const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: (0, httpHeaders_js_1.createHttpHeaders)({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: (0, core_util_1.stringToUint8Array)(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } } - else if (packageManager === 'yarn') { - return exports.supportedPackageManagers.yarn; + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map + +/***/ }), + +/***/ 46821: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logPolicyName = void 0; +exports.logPolicy = logPolicy; +const log_js_1 = __nccwpck_require__(30648); +const sanitizer_js_1 = __nccwpck_require__(34472); +/** + * The programmatic identifier of the logPolicy. + */ +exports.logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : log_js_1.logger.info; + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: exports.logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +//# sourceMappingURL=logPolicy.js.map + +/***/ }), + +/***/ 19042: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.multipartPolicyName = void 0; +exports.multipartPolicy = multipartPolicy; +const core_util_1 = __nccwpck_require__(80637); +const concat_js_1 = __nccwpck_require__(80107); +const typeGuards_js_1 = __nccwpck_require__(58520); +function generateBoundary() { + return `----AzSDKFormBoundary${(0, core_util_1.randomUUID)()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; + } + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if ((0, typeGuards_js_1.isBlob)(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? 
undefined : source.size; } else { - return null; + return undefined; } -}); -exports.getPackageManagerInfo = getPackageManagerInfo; +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + (0, core_util_1.stringToUint8Array)(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + (0, core_util_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + part.body, + (0, core_util_1.stringToUint8Array)(`\r\n--${boundary}`, "utf-8"), + ]), + (0, core_util_1.stringToUint8Array)("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); + } + request.body = await (0, concat_js_1.concat)(sources); +} /** - * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` - * - first through `getCacheDirectories` - * - second from `repoHasYarn3ManagedCache` - * - * it contains expensive IO operation and thus should be memoized + * Name of multipart policy */ -let projectDirectoriesMemoized = null; +exports.multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } +} /** - * unit test must reset memoized variables + * Pipeline policy for multipart requests */ -const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); -exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +function multipartPolicy() { + return { + name: exports.multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? 
boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +//# sourceMappingURL=multipartPolicy.js.map + +/***/ }), + +/***/ 82032: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ndJsonPolicyName = void 0; +exports.ndJsonPolicy = ndJsonPolicy; /** - * Expands (converts) the string input `cache-dependency-path` to list of directories that - * may be project roots - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of directories and possible + * The programmatic identifier of the ndJsonPolicy. */ -const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - if (projectDirectoriesMemoized !== null) { - return projectDirectoriesMemoized; - } - const globber = yield glob.create(cacheDependencyPath); - const cacheDependenciesPaths = yield globber.glob(); - const existingDirectories = cacheDependenciesPaths - .map(path_1.default.dirname) - .filter((0, util_1.unique)()) - .map(dirName => fs_1.default.realpathSync(dirName)) - .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); - if (!existingDirectories.length) - core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); - projectDirectoriesMemoized = existingDirectories; - return existingDirectories; -}); +exports.ndJsonPolicyName = "ndJsonPolicy"; /** - * Finds the cache directories configured for the repo if cache-dependency-path is not empty - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of files on which the cache depends + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
*/ -const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); - const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { - const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); - core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); - return cacheFolderPath; - }))); - // uniq in order to do not cache the same directories twice - return cacheFoldersPaths.filter((0, util_1.unique)()); -}); +function ndJsonPolicy() { + return { + name: exports.ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +//# sourceMappingURL=ndJsonPolicy.js.map + +/***/ }), + +/***/ 94761: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.globalNoProxyList = exports.proxyPolicyName = void 0; +exports.loadNoProxy = loadNoProxy; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.proxyPolicy = proxyPolicy; +const https_proxy_agent_1 = __nccwpck_require__(77219); +const http_proxy_agent_1 = __nccwpck_require__(4654); +const log_js_1 = __nccwpck_require__(30648); +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +exports.proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +exports.globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} /** - * Finds the cache directories configured for the repo ignoring cache-dependency-path - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @return list of files on which the cache depends + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. 
+ * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 */ -const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { - const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); - core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); - return [cacheFolderPath]; -}); +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} /** - * A function to find the cache directories configured for the repo - * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty - * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return list of files on which the cache depends + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * @deprecated - Internally this method is no longer necessary when setting proxy information. */ -const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project - // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 - if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { - return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } } - return getCacheDirectoriesForRootProject(packageManagerInfo); -}); -exports.getCacheDirectories = getCacheDirectories; + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? 
parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; +} /** - * A function to check if the directory is a yarn project configured to manage - * obsolete dependencies in the local cache - * @param directory - a path to the folder - * @return - true if the directory's project is yarn managed - * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false - * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false - * - if local cache is not explicitly enabled (not yarn3), return false - * - return true otherwise + * This method attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. */ -const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { - const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; - core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); - // if .yarn/cache directory exists the cache is managed by version control system - const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); - if (fs_1.default.existsSync(yarnCacheFile) && - fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { - core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); - return Promise.resolve(false); +function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? new URL(envProxy) : undefined; +} +function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); } - // NOTE: yarn1 returns 'undefined' with return code = 0 - const enableGlobalCache = yield (0, exports.getCommandOutput)('yarn config get enableGlobalCache', workDir); - // only local cache is not managed by yarn - const managed = enableGlobalCache.includes('false'); - if (managed) { - core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); - return true; + catch (_a) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; +} +function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. 
+ if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (request.tlsSettings) { + log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpProxyAgent; } else { - core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`); - return false; + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpsProxyAgent; } -}); +} /** - * A function to report the repo contains Yarn managed projects - * @param packageManagerInfo - used to make sure current package manager is yarn - * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns - * expected to be the result of `core.getInput('cache-dependency-path')` - * @return - true if all project directories configured to be Yarn managed + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns */ -const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManagerInfo.name !== 'yarn') - return false; - const yarnDirs = cacheDependencyPath - ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) - : ['']; - const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); - return isManagedList.every(Boolean); -}); -exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (cache.isFeatureAvailable()) - return true; - if (isGhes()) { - core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - return false; +function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + exports.globalNoProxyList.push(...loadNoProxy()); } - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - return false; + const defaultProxy = proxySettings + ? getUrlFromProxySettings(proxySettings) + : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: exports.proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + defaultProxy && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? 
undefined : globalBypassedMap)) { + setProxyAgentOnRequest(request, cachedAgents, defaultProxy); + } + else if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); + } + return next(request); + }, + }; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; - +//# sourceMappingURL=proxyPolicy.js.map /***/ }), -/***/ 69042: +/***/ 98526: /***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Outputs = exports.State = exports.LockType = void 0; -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType || (exports.LockType = LockType = {})); -var State; -(function (State) { - State["CachePackageManager"] = "SETUP_NODE_CACHE_PACKAGE_MANAGER"; - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; - State["CachePaths"] = "CACHE_PATHS"; -})(State || (exports.State = State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs || (exports.Outputs = Outputs = {})); - +exports.redirectPolicyName = void 0; +exports.redirectPolicy = redirectPolicy; +/** + * The programmatic identifier of the redirectPolicy. + */ +exports.redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. + */ +function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: exports.redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map /***/ }), -/***/ 70957: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 39700: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const tc = __importStar(__nccwpck_require__(27784)); -const semver_1 = __importDefault(__nccwpck_require__(11383)); -const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); -class BasePrereleaseNodejs extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); - } - findVersionInHostedToolCacheDirectory() { - let toolPath = ''; - const localVersionPaths = tc - .findAllVersions('node', this.nodeInfo.arch) - .filter(i => { - const prerelease = semver_1.default.prerelease(i, {}); - if (!prerelease) { - return false; +exports.retryPolicy = retryPolicy; +const helpers_js_1 = __nccwpck_require__(21333); +const logger_1 = __nccwpck_require__(89497); +const abort_controller_1 = __nccwpck_require__(11514); +const constants_js_1 = __nccwpck_require__(43171); +const retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new abort_controller_1.AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await (0, helpers_js_1.delay)(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. } - return prerelease[0].toString().includes(this.distribution); - }); - localVersionPaths.sort(semver_1.default.rcompare); - const localVersion = this.evaluateVersions(localVersionPaths); - if (localVersion) { - toolPath = tc.find('node', localVersion, this.nodeInfo.arch); - } - return toolPath; - } - validRange(versionSpec) { - let range; - const [raw, prerelease] = this.splitVersionSpec(versionSpec); - const isValidVersion = semver_1.default.valid(raw); - const rawVersion = (isValidVersion ? raw : semver_1.default.coerce(raw)); - if (prerelease !== this.distribution) { - range = versionSpec; - } - else { - range = `${semver_1.default.validRange(`^${rawVersion}-${this.distribution}`)}-0`; - } - return { range, options: { includePrerelease: !isValidVersion } }; - } - splitVersionSpec(versionSpec) { - return versionSpec.split(/-(.*)/s); - } + }, + }; } -exports["default"] = BasePrereleaseNodejs; - +//# sourceMappingURL=retryPolicy.js.map /***/ }), -/***/ 80007: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 93860: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const uuid_1 = __nccwpck_require__(75840); -const tc = __importStar(__nccwpck_require__(27784)); -const hc = __importStar(__nccwpck_require__(96255)); -const core = __importStar(__nccwpck_require__(42186)); -const io = __importStar(__nccwpck_require__(47351)); -const semver_1 = __importDefault(__nccwpck_require__(11383)); -const assert = __importStar(__nccwpck_require__(39491)); -const path = __importStar(__nccwpck_require__(71017)); -const os_1 = __importDefault(__nccwpck_require__(22037)); -const fs_1 = __importDefault(__nccwpck_require__(57147)); -class BaseDistribution { - constructor(nodeInfo) { - this.nodeInfo = nodeInfo; - this.osPlat = os_1.default.platform(); - this.httpClient = new hc.HttpClient('setup-node', [], { - allowRetries: true, - maxRetries: 3 - }); - } - setupNodeJs() { - return __awaiter(this, void 0, void 0, function* () { - let nodeJsVersions; - if (this.nodeInfo.checkLatest) { - const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); - this.nodeInfo.versionSpec = evaluatedVersion; - } - let toolPath = this.findVersionInHostedToolCacheDirectory(); - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - } - else { - const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); - const toolName = this.getNodejsDistInfo(evaluatedVersion); - toolPath = yield this.downloadNodejs(toolName); - } - if (this.osPlat != 'win32') { - toolPath = path.join(toolPath, 'bin'); +exports.setClientRequestIdPolicyName = void 0; +exports.setClientRequestIdPolicy = setClientRequestIdPolicy; +/** + * The programmatic identifier of the setClientRequestIdPolicy. 
+ */ +exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. + */ +function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: exports.setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); } - core.addPath(toolPath); - }); - } - findVersionInDist(nodeJsVersions) { - return __awaiter(this, void 0, void 0, function* () { - if (!nodeJsVersions) { - nodeJsVersions = yield this.getNodeJsVersions(); + return next(request); + }, + }; +} +//# sourceMappingURL=setClientRequestIdPolicy.js.map + +/***/ }), + +/***/ 72470: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.systemErrorRetryPolicyName = void 0; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +const exponentialRetryStrategy_js_1 = __nccwpck_require__(843); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link systemErrorRetryPolicy} + */ +exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. + */ +function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: exports.systemErrorRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map + +/***/ }), + +/***/ 54802: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.throttlingRetryPolicyName = void 0; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +const retryPolicy_js_1 = __nccwpck_require__(39700); +const constants_js_1 = __nccwpck_require__(43171); +/** + * Name of the {@link throttlingRetryPolicy} + */ +exports.throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. 
+ */ +function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: exports.throttlingRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=throttlingRetryPolicy.js.map + +/***/ }), + +/***/ 88446: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tlsPolicyName = void 0; +exports.tlsPolicy = tlsPolicy; +/** + * Name of the TLS Policy + */ +exports.tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. + */ +function tlsPolicy(tlsSettings) { + return { + name: exports.tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; } - const versions = this.filterVersions(nodeJsVersions); - const evaluatedVersion = this.evaluateVersions(versions); - if (!evaluatedVersion) { - throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); + return next(req); + }, + }; +} +//# sourceMappingURL=tlsPolicy.js.map + +/***/ }), + +/***/ 80606: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tracingPolicyName = void 0; +exports.tracingPolicy = tracingPolicy; +const core_tracing_1 = __nccwpck_require__(19363); +const constants_js_1 = __nccwpck_require__(43171); +const userAgent_js_1 = __nccwpck_require__(96158); +const log_js_1 = __nccwpck_require__(30648); +const core_util_1 = __nccwpck_require__(80637); +const restError_js_1 = __nccwpck_require__(61036); +const sanitizer_js_1 = __nccwpck_require__(34472); +/** + * The programmatic identifier of the tracingPolicy. + */ +exports.tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. 
+ */ +function tracingPolicy(options = {}) { + const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + const tracingClient = tryCreateTracingClient(); + return { + name: exports.tracingPolicyName, + async sendRequest(request, next) { + var _a; + if (!tracingClient) { + return next(request); + } + const userAgent = await userAgentPromise; + const spanAttributes = { + "http.url": sanitizer.sanitizeUrl(request.url), + "http.method": request.method, + "http.user_agent": userAgent, + requestId: request.requestId, + }; + if (userAgent) { + spanAttributes["http.user_agent"] = userAgent; } - return evaluatedVersion; - }); - } - evaluateVersions(versions) { - let version = ''; - const { range, options } = this.validRange(this.nodeInfo.versionSpec); - core.debug(`evaluating ${versions.length} versions`); - for (const potential of versions) { - const satisfied = semver_1.default.satisfies(potential, range, options); - if (satisfied) { - version = potential; - break; + const { span, tracingContext } = (_a = tryCreateSpan(tracingClient, request, spanAttributes)) !== null && _a !== void 0 ? _a : {}; + if (!span || !tracingContext) { + return next(request); } - } - if (version) { - core.debug(`matched: ${version}`); - } - else { - core.debug('match not found'); - } - return version; - } - findVersionInHostedToolCacheDirectory() { - return tc.find('node', this.nodeInfo.versionSpec, this.translateArchToDistUrl(this.nodeInfo.arch)); - } - getNodeJsVersions() { - return __awaiter(this, void 0, void 0, function* () { - const initialUrl = this.getDistributionUrl(); - const dataUrl = `${initialUrl}/index.json`; - const response = yield this.httpClient.getJson(dataUrl); - return response.result || []; - }); - } - getNodejsDistInfo(version) { - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - version = semver_1.default.clean(version) || ''; - const fileName = this.osPlat == 'win32' - ? `node-v${version}-win-${osArch}` - : `node-v${version}-${this.osPlat}-${osArch}`; - const urlFileName = this.osPlat == 'win32' - ? this.nodeInfo.arch === 'arm64' - ? 
`${fileName}.zip` - : `${fileName}.7z` - : `${fileName}.tar.gz`; - const initialUrl = this.getDistributionUrl(); - const url = `${initialUrl}/v${version}/${urlFileName}`; - return { - downloadUrl: url, - resolvedVersion: version, - arch: osArch, - fileName: fileName - }; - } - downloadNodejs(info) { - return __awaiter(this, void 0, void 0, function* () { - let downloadPath = ''; - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); try { - downloadPath = yield tc.downloadTool(info.downloadUrl); + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; } catch (err) { - if (err instanceof tc.HTTPError && - err.httpStatusCode == 404 && - this.osPlat == 'win32') { - return yield this.acquireWindowsNodeFromFallbackLocation(info.resolvedVersion, info.arch); - } + tryProcessError(span, err); throw err; } - const toolPath = yield this.extractArchive(downloadPath, info); - core.info('Done'); - return toolPath; + }, + }; +} +function tryCreateTracingClient() { + try { + return (0, core_tracing_1.createTracingClient)({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: constants_js_1.SDK_VERSION, }); } - validRange(versionSpec) { - var _a; - let options; - const c = semver_1.default.clean(versionSpec) || ''; - const valid = (_a = semver_1.default.valid(c)) !== null && _a !== void 0 ? _a : versionSpec; - return { range: valid, options }; + catch (e) { + log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; } - acquireWindowsNodeFromFallbackLocation(version_1) { - return __awaiter(this, arguments, void 0, function* (version, arch = os_1.default.arch()) { - const initialUrl = this.getDistributionUrl(); - const osArch = this.translateArchToDistUrl(arch); - // Create temporary folder to download to - const tempDownloadFolder = `temp_${(0, uuid_1.v4)()}`; - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - const tempDir = path.join(tempDirectory, tempDownloadFolder); - yield io.mkdirP(tempDir); - let exeUrl; - let libUrl; - try { - exeUrl = `${initialUrl}/v${version}/win-${osArch}/node.exe`; - libUrl = `${initialUrl}/v${version}/win-${osArch}/node.lib`; - core.info(`Downloading only node binary from ${exeUrl}`); - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - exeUrl = `${initialUrl}/v${version}/node.exe`; - libUrl = `${initialUrl}/v${version}/node.lib`; - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - else { - throw err; - } - } - const toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); - return toolPath; +} +function tryCreateSpan(tracingClient, request, spanAttributes) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes, }); + // If the span is not recording, don't do any more work. 
+ if (!span.isRecording()) { + span.end(); + return undefined; + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; } - extractArchive(downloadPath, info) { - return __awaiter(this, void 0, void 0, function* () { - // - // Extract - // - core.info('Extracting ...'); - let extPath; - info = info || {}; // satisfy compiler, never null when reaches here - if (this.osPlat == 'win32') { - const extension = this.nodeInfo.arch === 'arm64' ? '.zip' : '.7z'; - // Rename archive to add extension because after downloading - // archive does not contain extension type and it leads to some issues - // on Windows runners without PowerShell Core. - // - // For default PowerShell Windows it should contain extension type to unpack it. - if (extension === '.zip') { - const renamedArchive = `${downloadPath}.zip`; - fs_1.default.renameSync(downloadPath, renamedArchive); - extPath = yield tc.extractZip(renamedArchive); - } - else { - const _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); - extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); - } - // 7z extracts to folder matching file name - const nestedPath = path.join(extPath, path.basename(info.fileName, extension)); - if (fs_1.default.existsSync(nestedPath)) { - extPath = nestedPath; - } - } - else { - extPath = yield tc.extractTar(downloadPath, undefined, [ - 'xz', - '--strip', - '1' - ]); - } - // - // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded - // - core.info('Adding to the cache ...'); - const toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); - return toolPath; - }); + catch (e) { + log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; } - getDistFileName() { - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - // node offers a json list of versions - let dataFileName; - switch (this.osPlat) { - case 'linux': - dataFileName = `linux-${osArch}`; - break; - case 'darwin': - dataFileName = `osx-${osArch}-tar`; - break; - case 'win32': - if (this.nodeInfo.arch === 'arm64') { - dataFileName = `win-${osArch}-zip`; - } - else { - dataFileName = `win-${osArch}-exe`; - } - break; - default: - throw new Error(`Unexpected OS '${this.osPlat}'`); +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: (0, core_util_1.isError)(error) ? 
error : undefined, + }); + if ((0, restError_js_1.isRestError)(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); } - return dataFileName; + span.end(); } - filterVersions(nodeJsVersions) { - const versions = []; - const dataFileName = this.getDistFileName(); - nodeJsVersions.forEach((nodeVersion) => { - // ensure this version supports your os and platform - if (nodeVersion.files.indexOf(dataFileName) >= 0) { - versions.push(nodeVersion.version); - } - }); - return versions.sort(semver_1.default.rcompare); + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } - translateArchToDistUrl(arch) { - switch (arch) { - case 'arm': - return 'armv7l'; - default: - return arch; +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } } -exports["default"] = BaseDistribution; +//# sourceMappingURL=tracingPolicy.js.map +/***/ }), + +/***/ 88935: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.userAgentPolicyName = void 0; +exports.userAgentPolicy = userAgentPolicy; +const userAgent_js_1 = __nccwpck_require__(96158); +const UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +exports.userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. + */ +function userAgentPolicy(options = {}) { + const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + return { + name: exports.userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, await userAgentValue); + } + return next(request); + }, + }; +} +//# sourceMappingURL=userAgentPolicy.js.map /***/ }), -/***/ 85617: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 61036: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getNodejsDistribution = void 0; -const nightly_builds_1 = __importDefault(__nccwpck_require__(57127)); -const official_builds_1 = __importDefault(__nccwpck_require__(77854)); -const rc_builds_1 = __importDefault(__nccwpck_require__(98837)); -const canary_builds_1 = __importDefault(__nccwpck_require__(50969)); -var Distributions; -(function (Distributions) { - Distributions["DEFAULT"] = ""; - Distributions["CANARY"] = "v8-canary"; - Distributions["NIGHTLY"] = "nightly"; - Distributions["RC"] = "rc"; -})(Distributions || (Distributions = {})); -function getNodejsDistribution(installerOptions) { - const versionSpec = installerOptions.versionSpec; - let distribution; - if (versionSpec.includes(Distributions.NIGHTLY)) { - distribution = new nightly_builds_1.default(installerOptions); - } - else if (versionSpec.includes(Distributions.CANARY)) { - distribution = new canary_builds_1.default(installerOptions); +exports.RestError = void 0; +exports.isRestError = isRestError; +const core_util_1 = __nccwpck_require__(80637); +const inspect_js_1 = __nccwpck_require__(33106); +const sanitizer_js_1 = __nccwpck_require__(34472); +const errorSanitizer = new sanitizer_js_1.Sanitizer(); +/** + * A custom error type for failed pipeline requests. + */ +class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + // The request and response may contain sensitive information in the headers or body. + // To help prevent this sensitive information being accidentally logged, the request and response + // properties are marked as non-enumerable here. This prevents them showing up in the output of + // JSON.stringify and console.log. + Object.defineProperty(this, "request", { value: options.request, enumerable: false }); + Object.defineProperty(this, "response", { value: options.response, enumerable: false }); + Object.setPrototypeOf(this, RestError.prototype); } - else if (versionSpec.includes(Distributions.RC)) { - distribution = new rc_builds_1.default(installerOptions); + /** + * Logging method for util.inspect in Node + */ + [inspect_js_1.custom]() { + // Extract non-enumerable properties and add them back. This is OK since in this output the request and + // response get sanitized. + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(Object.assign(Object.assign({}, this), { request: this.request, response: this.response }))}`; } - else { - distribution = new official_builds_1.default(installerOptions); +} +exports.RestError = RestError; +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. 
+ */ +function isRestError(e) { + if (e instanceof RestError) { + return true; } - return distribution; + return (0, core_util_1.isError)(e) && e.name === "RestError"; } -exports.getNodejsDistribution = getNodejsDistribution; - +//# sourceMappingURL=restError.js.map /***/ }), -/***/ 57127: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 843: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(70957)); -class NightlyNodejs extends base_distribution_prerelease_1.default { - constructor(nodeInfo) { - super(nodeInfo); - this.distribution = 'nightly'; - } - getDistributionUrl() { - return 'https://nodejs.org/download/nightly'; +exports.exponentialRetryStrategy = exponentialRetryStrategy; +exports.isExponentialRetryResponse = isExponentialRetryResponse; +exports.isSystemError = isSystemError; +const core_util_1 = __nccwpck_require__(80637); +const throttlingRetryStrategy_js_1 = __nccwpck_require__(66645); +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? _b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + return (0, core_util_1.calculateRetryDelay)(retryCount, { + retryDelayInMs: retryInterval, + maxRetryDelayInMs: maxRetryInterval, + }); + }, + }; +} +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. 
+ */ +function isSystemError(err) { + if (!err) { + return false; } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); } -exports["default"] = NightlyNodejs; - +//# sourceMappingURL=exponentialRetryStrategy.js.map /***/ }), -/***/ 77854: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 66645: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(42186)); -const tc = __importStar(__nccwpck_require__(27784)); -const path_1 = __importDefault(__nccwpck_require__(71017)); -const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); -class OfficialBuilds extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); - } - setupNodeJs() { - return __awaiter(this, void 0, void 0, function* () { - var _a; - let manifest; - let nodeJsVersions; - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - if (this.isLtsAlias(this.nodeInfo.versionSpec)) { - core.info('Attempt to resolve LTS alias from manifest...'); - // No try-catch since it's not possible to resolve LTS alias without manifest - manifest = yield this.getManifest(); - this.nodeInfo.versionSpec = this.resolveLtsAliasFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, manifest); - } - if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { - nodeJsVersions = yield this.getNodeJsVersions(); - const versions = this.filterVersions(nodeJsVersions); - this.nodeInfo.versionSpec = this.evaluateVersions(versions); - core.info('getting latest node version...'); - } - if (this.nodeInfo.checkLatest) { - core.info('Attempt to resolve the latest version from manifest...'); - const resolvedVersion = yield this.resolveVersionFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); - if (resolvedVersion) { - this.nodeInfo.versionSpec = resolvedVersion; - core.info(`Resolved as '${resolvedVersion}'`); - } - else { - core.info(`Failed to resolve version ${this.nodeInfo.versionSpec} from manifest`); - } - } - let toolPath = this.findVersionInHostedToolCacheDirectory(); - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - this.addToolPath(toolPath); - return; - } - let downloadPath = ''; - try { - core.info(`Attempting to download ${this.nodeInfo.versionSpec}...`); - const versionInfo = yield this.getInfoFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); - if (versionInfo) { - core.info(`Acquiring ${versionInfo.resolvedVersion} - ${versionInfo.arch} from ${versionInfo.downloadUrl}`); - downloadPath = yield tc.downloadTool(versionInfo.downloadUrl, undefined, this.nodeInfo.auth); - if (downloadPath) { - toolPath = yield this.extractArchive(downloadPath, versionInfo); - } - } - else { - core.info('Not found in manifest. Falling back to download directly from Node'); - } - } - catch (err) { - // Rate limit? - if (err instanceof tc.HTTPError && - (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { - core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`); - } - else { - core.info(err.message); - } - core.debug((_a = err.stack) !== null && _a !== void 0 ? _a : 'empty stack'); - core.info('Falling back to download directly from Node'); - } - if (!toolPath) { - toolPath = yield this.downloadDirectlyFromNode(); - } - if (this.osPlat != 'win32') { - toolPath = path_1.default.join(toolPath, 'bin'); - } - core.addPath(toolPath); - }); +exports.isThrottlingRetryResponse = isThrottlingRetryResponse; +exports.throttlingRetryStrategy = throttlingRetryStrategy; +const helpers_js_1 = __nccwpck_require__(21333); +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). 
+ */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. + * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; } - addToolPath(toolPath) { - if (this.osPlat != 'win32') { - toolPath = path_1.default.join(toolPath, 'bin'); - } - core.addPath(toolPath); + catch (_a) { + return undefined; } - downloadDirectlyFromNode() { - return __awaiter(this, void 0, void 0, function* () { - const nodeJsVersions = yield this.getNodeJsVersions(); - const versions = this.filterVersions(nodeJsVersions); - const evaluatedVersion = this.evaluateVersions(versions); - if (!evaluatedVersion) { - throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); - } - const toolName = this.getNodejsDistInfo(evaluatedVersion); - try { - const toolPath = yield this.downloadNodejs(toolName); - return toolPath; - } - catch (error) { - if (error instanceof tc.HTTPError && error.httpStatusCode === 404) { - core.warning(`Node version ${this.nodeInfo.versionSpec} for platform ${this.osPlat} and architecture ${this.nodeInfo.arch} was found but failed to download. ` + - 'This usually happens when downloadable binaries are not fully updated at https://nodejs.org/. ' + - 'To resolve this issue you may either fall back to the older version or try again later.'); +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
+ */ +function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +//# sourceMappingURL=throttlingRetryStrategy.js.map + +/***/ }), + +/***/ 80107: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.concat = concat; +const tslib_1 = __nccwpck_require__(4351); +const node_stream_1 = __nccwpck_require__(84492); +const typeGuards_js_1 = __nccwpck_require__(58520); +const file_js_1 = __nccwpck_require__(3224); +function streamAsyncIterator() { + return tslib_1.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield tslib_1.__await(reader.read()); + if (done) { + return yield tslib_1.__await(void 0); } - throw error; + yield yield tslib_1.__await(value); } - }); - } - evaluateVersions(versions) { - let version = ''; - if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { - core.info(`getting latest node version...`); - return versions[0]; } - version = super.evaluateVersions(versions); - return version; - } - getDistributionUrl() { - return `https://nodejs.org/dist`; + finally { + reader.releaseLock(); + } + }); +} +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); } - getManifest() { - core.debug('Getting manifest from actions/node-versions@main'); - return tc.getManifestFromRepo('actions', 'node-versions', this.nodeInfo.auth, 'main'); + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); } - resolveLtsAliasFromManifest(versionSpec, stable, manifest) { - var _a; - const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (!alias) { - throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); - } - core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); - // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. - const n = Number(alias); - const aliases = Object.fromEntries(manifest - .filter(x => x.lts && x.stable === stable) - .map(x => [x.lts.toLowerCase(), x]) - .reverse()); - const numbered = Object.values(aliases); - const release = alias === '*' - ? numbered[numbered.length - 1] - : n < 0 - ? 
numbered[numbered.length - 1 + n] - : aliases[alias]; - if (!release) { - throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); - } - core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); - return release.version.split('.')[0]; +} +function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return node_stream_1.Readable.fromWeb(stream); } - resolveVersionFromManifest(versionSpec, stable, osArch, manifest) { - return __awaiter(this, void 0, void 0, function* () { - try { - const info = yield this.getInfoFromManifest(versionSpec, stable, osArch, manifest); - return info === null || info === void 0 ? void 0 : info.resolvedVersion; - } - catch (err) { - core.info('Unable to resolve version from manifest...'); - core.debug(err.message); - } - }); + else { + return stream; } - getInfoFromManifest(versionSpec, stable, osArch, manifest) { - return __awaiter(this, void 0, void 0, function* () { - let info = null; - if (!manifest) { - core.debug('No manifest cached'); - manifest = yield this.getManifest(); - } - const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); - if (rel && rel.files.length > 0) { - info = {}; - info.resolvedVersion = rel.version; - info.arch = rel.files[0].arch; - info.downloadUrl = rel.files[0].download_url; - info.fileName = rel.files[0].filename; - } - return info; - }); +} +function toStream(source) { + if (source instanceof Uint8Array) { + return node_stream_1.Readable.from(Buffer.from(source)); } - isLtsAlias(versionSpec) { - return versionSpec.startsWith('lts/'); + else if ((0, typeGuards_js_1.isBlob)(source)) { + return toStream((0, file_js_1.getRawContent)(source)); } - isLatestSyntax(versionSpec) { - return ['current', 'latest', 'node'].includes(versionSpec); + else { + return ensureNodeStream(source); } } -exports["default"] = OfficialBuilds; - +/** + * Utility function that concatenates a set of binary inputs into one combined output. + * + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. + * + * @internal + */ +async function concat(sources) { + return function () { + const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream); + return node_stream_1.Readable.from((function () { + return tslib_1.__asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of streams) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib_1.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield tslib_1.__await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield tslib_1.__await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); + }; +} +//# sourceMappingURL=concat.js.map /***/ }), -/***/ 98837: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 3224: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_1 = __importDefault(__nccwpck_require__(80007)); -class RcBuild extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); +exports.getRawContent = getRawContent; +exports.createFileFromStream = createFileFromStream; +exports.createFile = createFile; +const core_util_1 = __nccwpck_require__(80637); +const typeGuards_js_1 = __nccwpck_require__(58520); +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; +/** + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal + */ +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} +/** + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. + * + * @internal + */ +function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); } - getDistributionUrl() { - return 'https://nodejs.org/download/rc'; + else { + return blob.stream(); } } -exports["default"] = RcBuild; - +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. + * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? 
_d : -1, name, stream: () => { + const s = stream(); + if ((0, typeGuards_js_1.isNodeReadableStream)(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +function createFile(content, name, options = {}) { + var _a, _b, _c; + if (core_util_1.isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); + } + else { + return new File([content], name, options); + } +} +//# sourceMappingURL=file.js.map /***/ }), -/***/ 50969: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 21333: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(70957)); -class CanaryBuild extends base_distribution_prerelease_1.default { - constructor(nodeInfo) { - super(nodeInfo); - this.distribution = 'v8-canary'; - } - getDistributionUrl() { - return 'https://nodejs.org/download/v8-canary'; - } +exports.delay = delay; +exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber; +const abort_controller_1 = __nccwpck_require__(11514); +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new abort_controller_1.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); } -exports["default"] = CanaryBuild; +/** + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. + */ +function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; +} +//# sourceMappingURL=helpers.js.map + +/***/ }), +/***/ 33106: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.custom = void 0; +const node_util_1 = __nccwpck_require__(47261); +exports.custom = node_util_1.inspect.custom; +//# sourceMappingURL=inspect.js.map /***/ }), -/***/ 70399: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 34472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.run = void 0; -const core = __importStar(__nccwpck_require__(42186)); -const os_1 = __importDefault(__nccwpck_require__(22037)); -const auth = __importStar(__nccwpck_require__(17573)); -const path = __importStar(__nccwpck_require__(71017)); -const cache_restore_1 = __nccwpck_require__(19517); -const cache_utils_1 = __nccwpck_require__(41678); -const installer_factory_1 = __nccwpck_require__(85617); -const util_1 = __nccwpck_require__(92629); -const constants_1 = __nccwpck_require__(69042); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - // - // Version is optional. If supplied, install / use from the tool cache - // If not supplied then task is still used to setup proxy, auth, etc... - // - const version = resolveVersionInput(); - let arch = core.getInput('architecture'); - const cache = core.getInput('cache'); - // if architecture supplied but node-version is not - // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. - if (arch && !version) { - core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); +exports.Sanitizer = void 0; +const core_util_1 = __nccwpck_require__(80637); +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); } - if (!arch) { - arch = os_1.default.arch(); + if (key === "headers") { + return this.sanitizeHeaders(value); } - if (version) { - const token = core.getInput('token'); - const auth = !token ? 
undefined : `token ${token}`; - const stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; - const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; - const nodejsInfo = { - versionSpec: version, - checkLatest, - auth, - stable, - arch - }; - const nodeDistribution = (0, installer_factory_1.getNodejsDistribution)(nodejsInfo); - yield nodeDistribution.setupNodeJs(); + else if (key === "url") { + return this.sanitizeUrl(value); } - yield (0, util_1.printEnvDetailsAndSetOutput)(); - const registryUrl = core.getInput('registry-url'); - const alwaysAuth = core.getInput('always-auth'); - if (registryUrl) { - auth.configAuthentication(registryUrl, alwaysAuth); + else if (key === "query") { + return this.sanitizeQuery(value); } - if (cache && (0, cache_utils_1.isCacheFeatureAvailable)()) { - core.saveState(constants_1.State.CachePackageManager, cache); - const cacheDependencyPath = core.getInput('cache-dependency-path'); - yield (0, cache_restore_1.restoreCache)(cache, cacheDependencyPath); + else if (key === "body") { + // Don't log the request body + return undefined; } - const matchersPath = path.join(__dirname, '../..', '.github'); - core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || (0, core_util_1.isObject)(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null || value === "") { + return value; } - catch (err) { - core.setFailed(err.message); + const url = new URL(value); + if (!url.search) { + return value; } - }); -} -exports.run = run; -function resolveVersionInput() { - let version = core.getInput('node-version'); - const versionFileInput = core.getInput('node-version-file'); - if (version && versionFileInput) { - core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); } - if (version) { - return version; + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; } - if (versionFileInput) { - const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); - const parsedVersion = (0, util_1.getNodeVersionFromFile)(versionFilePath); - if (parsedVersion) { - version = parsedVersion; + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; } - else { - core.warning(`Could not determine node version from ${versionFilePath}. 
Falling back`); + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } } - core.info(`Resolved ${versionFileInput} as ${version}`); + return sanitized; } - return version; } - +exports.Sanitizer = Sanitizer; +//# sourceMappingURL=sanitizer.js.map /***/ }), -/***/ 92629: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 50601: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.unique = exports.printEnvDetailsAndSetOutput = exports.getNodeVersionFromFile = void 0; -const core = __importStar(__nccwpck_require__(42186)); -const exec = __importStar(__nccwpck_require__(71514)); -const io = __importStar(__nccwpck_require__(47351)); -const fs_1 = __importDefault(__nccwpck_require__(57147)); -const path_1 = __importDefault(__nccwpck_require__(71017)); -function getNodeVersionFromFile(versionFilePath) { - var _a, _b, _c, _d, _e; - if (!fs_1.default.existsSync(versionFilePath)) { - throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); - } - const contents = fs_1.default.readFileSync(versionFilePath, 'utf8'); - // Try parsing the file as an NPM `package.json` file. - try { - const manifest = JSON.parse(contents); - // Presume package.json file. - if (typeof manifest === 'object' && !!manifest) { - // Support Volta. - // See https://docs.volta.sh/guide/understanding#managing-your-project - if ((_a = manifest.volta) === null || _a === void 0 ? 
void 0 : _a.node) { - return manifest.volta.node; +exports.DEFAULT_CYCLER_OPTIONS = void 0; +exports.createTokenCycler = createTokenCycler; +const helpers_js_1 = __nccwpck_require__(21333); +// Default options for the cycler if none are provided +exports.DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); } - if ((_b = manifest.engines) === null || _b === void 0 ? void 0 : _b.node) { - return manifest.engines.node; + catch (_a) { + return null; } - // Support Volta workspaces. - // See https://docs.volta.sh/advanced/workspaces - if ((_c = manifest.volta) === null || _c === void 0 ? void 0 : _c.extends) { - const extendedFilePath = path_1.default.resolve(path_1.default.dirname(versionFilePath), manifest.volta.extends); - core.info('Resolving node version from ' + extendedFilePath); - return getNodeVersionFromFile(extendedFilePath); + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); } - // If contents are an object, we parsed JSON - // this can happen if node-version-file is a package.json - // yet contains no volta.node or engines.node - // - // If node-version file is _not_ JSON, control flow - // will not have reached these lines. - // - // And because we've reached here, we know the contents - // *are* JSON, so no further string parsing makes sense. - return null; + return finalToken; } } - catch (_f) { - core.info('Node version file is not JSON file'); + let token = await tryGetAccessToken(); + while (token === null) { + await (0, helpers_js_1.delay)(retryIntervalInMs); + token = await tryGetAccessToken(); } - const found = contents.match(/^(?:node(js)?\s+)?v?(?[^\s]+)$/m); - return (_e = (_d = found === null || found === void 0 ? void 0 : found.groups) === null || _d === void 0 ? void 0 : _d.version) !== null && _e !== void 0 ? _e : contents.trim(); + return token; } -exports.getNodeVersionFromFile = getNodeVersionFromFile; -function printEnvDetailsAndSetOutput() { - return __awaiter(this, void 0, void 0, function* () { - core.startGroup('Environment details'); - const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { - const pathTool = yield io.which(tool, false); - const output = pathTool ? 
yield getToolVersion(tool, ['--version']) : ''; - return { tool, output }; - })); - const tools = yield Promise.all(promises); - tools.forEach(({ tool, output }) => { - if (tool === 'node') { - core.setOutput(`${tool}-version`, output); +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + if (cycler.isRefreshing) { + return false; } - core.info(`${tool}: ${output}`); - }); - core.endGroup(); - }); -} -exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; -function getToolVersion(tool, options) { - return __awaiter(this, void 0, void 0, function* () { - try { - const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { - ignoreReturnCode: true, - silent: true - }); - if (exitCode > 0) { - core.info(`[warning]${stderr}`); - return ''; + if ((token === null || token === void 0 ? void 0 : token.refreshAfterTimestamp) && token.refreshAfterTimestamp < Date.now()) { + return true; } - return stdout.trim(); + return ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now(); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. 
All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); } - catch (err) { - return ''; + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + const hasClaimChallenge = Boolean(tokenOptions.claims); + const tenantIdChanged = tenantId !== tokenOptions.tenantId; + if (hasClaimChallenge) { + // If we've received a claim, we know the existing token isn't valid + // We want to clear it so that that refresh worker won't use the old expiration time as a timeout + token = null; + } + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. + const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; + if (mustRefresh) { + return refresh(scopes, tokenOptions); + } + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); } - }); -} -const unique = () => { - const encountered = new Set(); - return (value) => { - if (encountered.has(value)) - return false; - encountered.add(value); - return true; + return token; }; -}; -exports.unique = unique; - - -/***/ }), - -/***/ 22877: -/***/ ((module) => { - -module.exports = eval("require")("encoding"); - +} +//# sourceMappingURL=tokenCycler.js.map /***/ }), -/***/ 39491: -/***/ ((module) => { +/***/ 58520: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("assert"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isNodeReadableStream = isNodeReadableStream; +exports.isWebReadableStream = isWebReadableStream; +exports.isReadableStream = isReadableStream; +exports.isBlob = isBlob; +function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +function isBlob(x) { + return typeof x.stream === "function"; +} +//# sourceMappingURL=typeGuards.js.map /***/ }), -/***/ 50852: -/***/ ((module) => { +/***/ 96158: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("async_hooks"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getUserAgentHeaderName = getUserAgentHeaderName; +exports.getUserAgentValue = getUserAgentValue; +const userAgentPlatform_js_1 = __nccwpck_require__(15316); +const constants_js_1 = __nccwpck_require__(43171); +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? 
`${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +function getUserAgentHeaderName() { + return (0, userAgentPlatform_js_1.getHeaderName)(); +} +/** + * @internal + */ +async function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); + await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +//# sourceMappingURL=userAgent.js.map /***/ }), -/***/ 14300: -/***/ ((module) => { +/***/ 15316: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("buffer"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHeaderName = getHeaderName; +exports.setPlatformSpecificData = setPlatformSpecificData; +const tslib_1 = __nccwpck_require__(4351); +const os = tslib_1.__importStar(__nccwpck_require__(70612)); +const process = tslib_1.__importStar(__nccwpck_require__(97742)); +/** + * @internal + */ +function getHeaderName() { + return "User-Agent"; +} +/** + * @internal + */ +async function setPlatformSpecificData(map) { + if (process && process.versions) { + const versions = process.versions; + if (versions.bun) { + map.set("Bun", versions.bun); + } + else if (versions.deno) { + map.set("Deno", versions.deno); + } + else if (versions.node) { + map.set("Node", versions.node); + } + } + map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`); +} +//# sourceMappingURL=userAgentPlatform.js.map /***/ }), -/***/ 32081: -/***/ ((module) => { +/***/ 59390: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("child_process"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 96206: -/***/ ((module) => { +/***/ 11514: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("console"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(59390); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6113: -/***/ ((module) => { +/***/ 19363: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("crypto"); + +// Copyright (c) Microsoft Corporation. 
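The user-agent helpers above assemble a telemetry string by joining `key/value` tokens from a Map and appending runtime and OS details. A minimal standalone sketch of the same idea using only Node built-ins; the `buildUserAgent` name and the version placeholder are illustrative, not part of the bundle:

```ts
import * as os from "os";

// Join telemetry entries into "key/value" tokens, mirroring getUserAgentString above,
// after seeding the map with runtime and OS details as setPlatformSpecificData does.
function buildUserAgent(prefix?: string): string {
  const info = new Map<string, string>();
  info.set("core-rest-pipeline", "1.x"); // placeholder version token
  if (process.versions && process.versions.node) {
    info.set("Node", process.versions.node);
  }
  info.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`);
  const tokens = Array.from(info).map(([key, value]) => (value ? `${key}/${value}` : key));
  const agent = tokens.join(" ");
  return prefix ? `${prefix} ${agent}` : agent;
}

// Possible output: "my-app core-rest-pipeline/1.x Node/20.11.1 OS/(x64-Linux-6.5.0)"
console.log(buildUserAgent("my-app"));
```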
+// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTracingClient = exports.useInstrumenter = void 0; +var instrumenter_js_1 = __nccwpck_require__(63418); +Object.defineProperty(exports, "useInstrumenter", ({ enumerable: true, get: function () { return instrumenter_js_1.useInstrumenter; } })); +var tracingClient_js_1 = __nccwpck_require__(69254); +Object.defineProperty(exports, "createTracingClient", ({ enumerable: true, get: function () { return tracingClient_js_1.createTracingClient; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 67643: -/***/ ((module) => { +/***/ 63418: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("diagnostics_channel"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDefaultTracingSpan = createDefaultTracingSpan; +exports.createDefaultInstrumenter = createDefaultInstrumenter; +exports.useInstrumenter = useInstrumenter; +exports.getInstrumenter = getInstrumenter; +const tracingContext_js_1 = __nccwpck_require__(18110); +const state_js_1 = __nccwpck_require__(81241); +function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + addEvent: () => { + // noop + }, + }; +} +function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +function useInstrumenter(instrumenter) { + state_js_1.state.instrumenterImplementation = instrumenter; +} +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. + * + * @returns The currently set instrumenter + */ +function getInstrumenter() { + if (!state_js_1.state.instrumenterImplementation) { + state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state_js_1.state.instrumenterImplementation; +} +//# sourceMappingURL=instrumenter.js.map /***/ }), -/***/ 82361: -/***/ ((module) => { +/***/ 81241: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("events"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.state = void 0; +/** + * @internal + * + * Holds the singleton instrumenter, to be shared across CJS and ESM imports. + */ +exports.state = { + instrumenterImplementation: undefined, +}; +//# sourceMappingURL=state-cjs.cjs.map /***/ }), -/***/ 57147: -/***/ ((module) => { +/***/ 69254: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("fs"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
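The instrumenter module keeps one globally shared implementation and falls back to the no-op tracer above when nothing has been registered. A rough sketch of the hook shape it expects; `consoleInstrumenter` and its logging behavior are invented for illustration and are not the library default:

```ts
// Span surface mirrored from createDefaultTracingSpan above; every method is a no-op
// there unless a registered instrumenter supplies real behavior.
interface TracingSpanLike {
  end(): void;
  isRecording(): boolean;
  recordException(e: unknown): void;
  setAttribute(name: string, value: unknown): void;
  setStatus(status: { status: string; error?: unknown }): void;
  addEvent(name: string): void;
}

// A toy instrumenter that only logs span lifecycle events to the console.
const consoleInstrumenter = {
  createRequestHeaders: () => ({}),
  parseTraceparentHeader: () => undefined,
  startSpan(name: string, spanOptions: { tracingContext?: unknown }) {
    console.log(`start span: ${name}`);
    const span: TracingSpanLike = {
      end: () => console.log(`end span: ${name}`),
      isRecording: () => true,
      recordException: (e) => console.log(`exception in ${name}:`, e),
      setAttribute: (k, v) => console.log(`${name} ${k}=${String(v)}`),
      setStatus: (s) => console.log(`${name} status=${s.status}`),
      addEvent: (evt) => console.log(`${name} event: ${evt}`),
    };
    return { span, tracingContext: spanOptions.tracingContext };
  },
  withContext<T>(_context: unknown, callback: (...args: unknown[]) => T, ...args: unknown[]): T {
    return callback(...args);
  },
};

// Exercising the toy instrumenter directly; with @azure/core-tracing available it
// could instead be registered through useInstrumenter(consoleInstrumenter).
const { span } = consoleInstrumenter.startSpan("demo", {});
span.setAttribute("az.namespace", "Example.Namespace");
span.end();
```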
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTracingClient = createTracingClient; +const instrumenter_js_1 = __nccwpck_require__(63418); +const tracingContext_js_1 = __nccwpck_require__(18110); +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. + */ +function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. + */ + function parseTraceparentHeader(traceparentHeader) { + return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. + */ + function createRequestHeaders(tracingContext) { + return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +//# sourceMappingURL=tracingClient.js.map /***/ }), -/***/ 13685: -/***/ ((module) => { +/***/ 18110: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("http"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
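`withSpan` above centralizes span bookkeeping for an async callback: set a success status on resolve, an error status on throw, and always call `end()` in `finally`. A reduced sketch of that control flow with a stand-in span type; `SpanLike` and `withSpanLike` are illustrative names, not part of the bundle:

```ts
interface SpanLike {
  setStatus(status: { status: "success" | "error"; error?: unknown }): void;
  end(): void;
}

// Mirrors the try/catch/finally structure of withSpan in the bundled tracing client:
// mark success on resolve, record the error on throw, always end the span.
async function withSpanLike<T>(
  name: string,
  startSpan: (name: string) => SpanLike,
  callback: (span: SpanLike) => Promise<T>
): Promise<T> {
  const span = startSpan(name);
  try {
    const result = await callback(span);
    span.setStatus({ status: "success" });
    return result;
  } catch (err) {
    span.setStatus({ status: "error", error: err });
    throw err;
  } finally {
    span.end();
  }
}

// Usage with a console-backed span:
const demoSpan: SpanLike = {
  setStatus: (s) => console.log(`status: ${s.status}`),
  end: () => console.log("span ended")
};
withSpanLike("Example.operation", () => demoSpan, async () => 42).then(console.log);
```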
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TracingContextImpl = exports.knownContextKeys = void 0; +exports.createTracingContext = createTracingContext; +/** @internal */ +exports.knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(exports.knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(exports.knownContextKeys.namespace, options.namespace); + } + return context; +} +/** @internal */ +class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +exports.TracingContextImpl = TracingContextImpl; +//# sourceMappingURL=tracingContext.js.map /***/ }), -/***/ 85158: -/***/ ((module) => { +/***/ 87205: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("http2"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cancelablePromiseRace = cancelablePromiseRace; +/** + * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. + */ +async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; + const aborter = new AbortController(); + function abortHandler() { + aborter.abort(); + } + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); + try { + return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); + } + finally { + aborter.abort(); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); + } +} +//# sourceMappingURL=aborterUtils.js.map /***/ }), -/***/ 95687: -/***/ ((module) => { +/***/ 9972: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("https"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.uint8ArrayToString = uint8ArrayToString; +exports.stringToUint8Array = stringToUint8Array; +/** + * The helper that transforms bytes with specific character encoding into string + * @param bytes - the uint8array bytes + * @param format - the format we use to encode the byte + * @returns a string of the encoded string + */ +function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); +} +/** + * The helper that transforms string to specific character encoded bytes array. 
+ * @param value - the string to be converted + * @param format - the format we use to decode the value + * @returns a uint8array + */ +function stringToUint8Array(value, format) { + return Buffer.from(value, format); +} +//# sourceMappingURL=bytesEncoding.js.map /***/ }), -/***/ 41808: -/***/ ((module) => { +/***/ 97980: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("net"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +var _a, _b, _c, _d; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isReactNative = exports.isNodeRuntime = exports.isNode = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; +/** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ +// eslint-disable-next-line @azure/azure-sdk/ts-no-window +exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ +exports.isWebWorker = typeof self === "object" && + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); +/** + * A constant that indicates whether the environment the code is running is Deno. + */ +exports.isDeno = typeof Deno !== "undefined" && + typeof Deno.version !== "undefined" && + typeof Deno.version.deno !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is Bun.sh. + */ +exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + */ +exports.isNodeLike = typeof globalThis.process !== "undefined" && + Boolean(globalThis.process.version) && + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. + */ +exports.isNode = exports.isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno; +/** + * A constant that indicates whether the environment the code is running is in React-Native. + */ +// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js +exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; +//# sourceMappingURL=checkEnvironment.js.map /***/ }), -/***/ 15673: -/***/ ((module) => { +/***/ 12376: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:events"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
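The byte-encoding helpers above are thin wrappers over `Buffer`. A small round-trip check of the same idea; the function bodies mirror the module, and the sample string is arbitrary:

```ts
// Thin Buffer wrappers, matching the helpers in the bundled module.
function uint8ArrayToString(bytes: Uint8Array, format: BufferEncoding): string {
  return Buffer.from(bytes).toString(format);
}

function stringToUint8Array(value: string, format: BufferEncoding): Uint8Array {
  return Buffer.from(value, format);
}

// Round trip: utf-8 text -> bytes -> base64 text -> bytes -> utf-8 text.
const original = "setup-node";
const bytes = stringToUint8Array(original, "utf-8");
const asBase64 = uint8ArrayToString(bytes, "base64");
const restored = uint8ArrayToString(stringToUint8Array(asBase64, "base64"), "utf-8");
console.log(asBase64, restored === original); // "c2V0dXAtbm9kZQ==" true
```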
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createAbortablePromise = createAbortablePromise; +const abort_controller_1 = __nccwpck_require__(54812); +/** + * Creates an abortable promise. + * @param buildPromise - A function that takes the resolve and reject functions as parameters. + * @param options - The options for the abortable promise. + * @returns A promise that can be aborted. + */ +function createAbortablePromise(buildPromise, options) { + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; + return new Promise((resolve, reject) => { + function rejectOnAbort() { + reject(new abort_controller_1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); + } + function removeListeners() { + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); + } + function onAbort() { + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); + removeListeners(); + rejectOnAbort(); + } + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { + return rejectOnAbort(); + } + try { + buildPromise((x) => { + removeListeners(); + resolve(x); + }, (x) => { + removeListeners(); + reject(x); + }); + } + catch (err) { + reject(err); + } + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); + }); +} +//# sourceMappingURL=createAbortablePromise.js.map /***/ }), -/***/ 84492: -/***/ ((module) => { +/***/ 19259: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:stream"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.delay = delay; +exports.calculateRetryDelay = calculateRetryDelay; +const createAbortablePromise_js_1 = __nccwpck_require__(12376); +const random_js_1 = __nccwpck_require__(93710); +const StandardAbortMessage = "The delay was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. + * @param timeInMs - The number of milliseconds to be delayed. + * @param options - The options for delay - currently abort options + * @returns Promise that is resolved after timeInMs + */ +function delay(timeInMs, options) { + let token; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; + return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => { + token = setTimeout(resolve, timeInMs); + }, { + cleanupBeforeAbort: () => clearTimeout(token), + abortSignal, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, + }); +} +/** + * Calculates the delay interval for retry attempts using exponential delay with jitter. + * @param retryAttempt - The current retry attempt number. + * @param config - The exponential retry configuration. + * @returns An object containing the calculated retry delay. 
+ */ +function calculateRetryDelay(retryAttempt, config) { + // Exponentially increase the delay each time + const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); + // Don't let the delay exceed the maximum + const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); + return { retryAfterInMs }; +} +//# sourceMappingURL=delay.js.map /***/ }), -/***/ 47261: -/***/ ((module) => { +/***/ 46734: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:util"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isError = isError; +exports.getErrorMessage = getErrorMessage; +const object_js_1 = __nccwpck_require__(56538); +/** + * Typeguard for an error object shape (has name and message) + * @param e - Something caught by a catch clause. + */ +function isError(e) { + if ((0, object_js_1.isObject)(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; +} +/** + * Given what is thought to be an error object, return the message if possible. + * If the message is missing, returns a stringified version of the input. + * @param e - Something thrown from a try block + * @returns The error message or a string of the input + */ +function getErrorMessage(e) { + if (isError(e)) { + return e.message; + } + else { + let stringified; + try { + if (typeof e === "object" && e) { + stringified = JSON.stringify(e); + } + else { + stringified = String(e); + } + } + catch (err) { + stringified = "[unable to stringify input]"; + } + return `Unknown error ${stringified}`; + } +} +//# sourceMappingURL=error.js.map /***/ }), -/***/ 22037: -/***/ ((module) => { +/***/ 80637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("os"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
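`calculateRetryDelay` above grows the delay exponentially, clamps it to a maximum, and then applies jitter so the final value falls between 50% and 100% of the clamped delay. A self-contained sketch of the same arithmetic; the config values in the loop are arbitrary, and the jitter here uses plain `Math.random`, which is not cryptographically secure:

```ts
interface RetryConfig {
  retryDelayInMs: number;    // base delay used for attempt 0
  maxRetryDelayInMs: number; // cap applied to any single delay
}

// Same shape of math as calculateRetryDelay above: exponential growth, a cap,
// then jitter so the result lands between 50% and 100% of the capped value.
function calculateRetryDelayLike(retryAttempt: number, config: RetryConfig): { retryAfterInMs: number } {
  const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt);
  const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay);
  const jitter = Math.floor(Math.random() * (clampedDelay / 2 + 1)); // not secure randomness
  return { retryAfterInMs: clampedDelay / 2 + jitter };
}

// Attempts 0..4 with a 1s base and a 30s cap land roughly in
// 0.5-1s, 1-2s, 2-4s, 4-8s and 8-16s respectively.
for (let attempt = 0; attempt < 5; attempt++) {
  console.log(attempt, calculateRetryDelayLike(attempt, { retryDelayInMs: 1000, maxRetryDelayInMs: 30000 }));
}
```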
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.calculateRetryDelay = exports.delay = void 0; +var delay_js_1 = __nccwpck_require__(19259); +Object.defineProperty(exports, "delay", ({ enumerable: true, get: function () { return delay_js_1.delay; } })); +Object.defineProperty(exports, "calculateRetryDelay", ({ enumerable: true, get: function () { return delay_js_1.calculateRetryDelay; } })); +var aborterUtils_js_1 = __nccwpck_require__(87205); +Object.defineProperty(exports, "cancelablePromiseRace", ({ enumerable: true, get: function () { return aborterUtils_js_1.cancelablePromiseRace; } })); +var createAbortablePromise_js_1 = __nccwpck_require__(12376); +Object.defineProperty(exports, "createAbortablePromise", ({ enumerable: true, get: function () { return createAbortablePromise_js_1.createAbortablePromise; } })); +var random_js_1 = __nccwpck_require__(93710); +Object.defineProperty(exports, "getRandomIntegerInclusive", ({ enumerable: true, get: function () { return random_js_1.getRandomIntegerInclusive; } })); +var object_js_1 = __nccwpck_require__(56538); +Object.defineProperty(exports, "isObject", ({ enumerable: true, get: function () { return object_js_1.isObject; } })); +var error_js_1 = __nccwpck_require__(46734); +Object.defineProperty(exports, "isError", ({ enumerable: true, get: function () { return error_js_1.isError; } })); +Object.defineProperty(exports, "getErrorMessage", ({ enumerable: true, get: function () { return error_js_1.getErrorMessage; } })); +var sha256_js_1 = __nccwpck_require__(94793); +Object.defineProperty(exports, "computeSha256Hash", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hash; } })); +Object.defineProperty(exports, "computeSha256Hmac", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hmac; } })); +var typeGuards_js_1 = __nccwpck_require__(1187); +Object.defineProperty(exports, "isDefined", ({ enumerable: true, get: function () { return typeGuards_js_1.isDefined; } })); +Object.defineProperty(exports, "isObjectWithProperties", ({ enumerable: true, get: function () { return typeGuards_js_1.isObjectWithProperties; } })); +Object.defineProperty(exports, "objectHasProperty", ({ enumerable: true, get: function () { return typeGuards_js_1.objectHasProperty; } })); +var uuidUtils_js_1 = __nccwpck_require__(17658); +Object.defineProperty(exports, "randomUUID", ({ enumerable: true, get: function () { return uuidUtils_js_1.randomUUID; } })); +var checkEnvironment_js_1 = __nccwpck_require__(97980); +Object.defineProperty(exports, "isBrowser", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } })); +Object.defineProperty(exports, "isBun", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } })); +Object.defineProperty(exports, "isNode", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } })); +Object.defineProperty(exports, "isNodeLike", ({ enumerable: true, get: function () { 
return checkEnvironment_js_1.isNodeLike; } })); +Object.defineProperty(exports, "isNodeRuntime", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeRuntime; } })); +Object.defineProperty(exports, "isDeno", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } })); +Object.defineProperty(exports, "isReactNative", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } })); +Object.defineProperty(exports, "isWebWorker", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isWebWorker; } })); +var bytesEncoding_js_1 = __nccwpck_require__(9972); +Object.defineProperty(exports, "uint8ArrayToString", ({ enumerable: true, get: function () { return bytesEncoding_js_1.uint8ArrayToString; } })); +Object.defineProperty(exports, "stringToUint8Array", ({ enumerable: true, get: function () { return bytesEncoding_js_1.stringToUint8Array; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 71017: -/***/ ((module) => { +/***/ 56538: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("path"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isObject = isObject; +/** + * Helper to determine when an input is a generic JS object. + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=object.js.map /***/ }), -/***/ 4074: -/***/ ((module) => { +/***/ 93710: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("perf_hooks"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRandomIntegerInclusive = getRandomIntegerInclusive; +/** + * Returns a random integer value between a lower and upper bound, + * inclusive of both bounds. + * Note that this uses Math.random and isn't secure. If you need to use + * this for any kind of security purpose, find a better source of random. + * @param min - The smallest integer value allowed. + * @param max - The largest integer value allowed. + */ +function getRandomIntegerInclusive(min, max) { + // Make sure inputs are integers. + min = Math.ceil(min); + max = Math.floor(max); + // Pick a random offset from zero to the size of the range. + // Since Math.random() can never return 1, we have to make the range one larger + // in order to be inclusive of the maximum value after we take the floor. + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; +} +//# sourceMappingURL=random.js.map /***/ }), -/***/ 85477: -/***/ ((module) => { +/***/ 94793: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("punycode"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.computeSha256Hmac = computeSha256Hmac; +exports.computeSha256Hash = computeSha256Hash; +const crypto_1 = __nccwpck_require__(6113); +/** + * Generates a SHA-256 HMAC signature. + * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. 
+ * @param stringToSign - The data to be signed. + * @param encoding - The textual encoding to use for the returned HMAC digest. + */ +async function computeSha256Hmac(key, stringToSign, encoding) { + const decodedKey = Buffer.from(key, "base64"); + return (0, crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); +} +/** + * Generates a SHA-256 hash. + * @param content - The data to be included in the hash. + * @param encoding - The textual encoding to use for the returned hash. + */ +async function computeSha256Hash(content, encoding) { + return (0, crypto_1.createHash)("sha256").update(content).digest(encoding); +} +//# sourceMappingURL=sha256.js.map /***/ }), -/***/ 63477: -/***/ ((module) => { +/***/ 1187: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("querystring"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDefined = isDefined; +exports.isObjectWithProperties = isObjectWithProperties; +exports.objectHasProperty = objectHasProperty; +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + */ +function objectHasProperty(thing, property) { + return (isDefined(thing) && typeof thing === "object" && property in thing); +} +//# sourceMappingURL=typeGuards.js.map /***/ }), -/***/ 12781: -/***/ ((module) => { +/***/ 17658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("stream"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomUUID = randomUUID; +const crypto_1 = __nccwpck_require__(6113); +// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. +const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" + ? globalThis.crypto.randomUUID.bind(globalThis.crypto) + : crypto_1.randomUUID; +/** + * Generated Universally Unique Identifier + * + * @returns RFC4122 v4 UUID. + */ +function randomUUID() { + return uuidFunction(); +} +//# sourceMappingURL=uuidUtils.js.map /***/ }), -/***/ 35356: -/***/ ((module) => { +/***/ 42118: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("stream/web"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. 
+ * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 71576: -/***/ ((module) => { +/***/ 54812: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("string_decoder"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(42118); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 39512: -/***/ ((module) => { +/***/ 17309: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("timers"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0; +var xml_js_1 = __nccwpck_require__(39170); +Object.defineProperty(exports, "stringifyXML", ({ enumerable: true, get: function () { return xml_js_1.stringifyXML; } })); +Object.defineProperty(exports, "parseXML", ({ enumerable: true, get: function () { return xml_js_1.parseXML; } })); +var xml_common_js_1 = __nccwpck_require__(62060); +Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_ATTRKEY; } })); +Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_CHARKEY; } })); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 24404: -/***/ ((module) => { +/***/ 62060: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("tls"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map /***/ }), -/***/ 76224: -/***/ ((module) => { +/***/ 39170: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("tty"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.stringifyXML = stringifyXML; +exports.parseXML = parseXML; +const fast_xml_parser_1 = __nccwpck_require__(12603); +const xml_common_js_1 = __nccwpck_require__(62060); +function getCommonOptions(options) { + var _a; + return { + attributesGroupName: xml_common_js_1.XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : xml_common_js_1.XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false, + }; +} +function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" }); +} +function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); +} +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the XML building of given JSON object + * `rootName` indicates the name of the root element in the resulting XML + */ +function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + * `includeRoot` indicates whether the root element is to be included or not in the output + */ +async function parseXML(str, opts = {}) { + if (!str) { + throw new Error("Document is empty"); + } + const validation = fast_xml_parser_1.XMLValidator.validate(str); + if (validation !== true) { + throw validation; + } + const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str); + // Remove the node. + // This is a change in behavior on fxp v4. Issue #424 + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? Object.assign({}, value) : value; + } + } + return parsedXml; +} +//# sourceMappingURL=xml.js.map /***/ }), -/***/ 57310: -/***/ ((module) => { +/***/ 80162: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("url"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
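`stringifyXML` and `parseXML` above wrap fast-xml-parser: objects serialize under a configurable root element, attributes live under the `$` key and element text under `_`, and parsing drops the root unless `includeRoot` is set. A small round trip, assuming these helpers are consumed as the `@azure/core-xml` package they appear to come from; the `Blob`/`Name`/`Properties` shape is just sample data:

```ts
import { parseXML, stringifyXML } from "@azure/core-xml";

async function demo(): Promise<void> {
  // "$" carries attributes (XML_ATTRKEY) and "_" carries element text (XML_CHARKEY).
  const entry = { Name: "dist.tgz", Properties: { $: { encrypted: "true" }, _: "42" } };

  const xml = stringifyXML(entry, { rootName: "Blob" });
  // Roughly: <Blob><Name>dist.tgz</Name><Properties encrypted="true">42</Properties></Blob>
  console.log(xml);

  // Without includeRoot, the parser returns the contents of <Blob> directly,
  // and values stay as strings because tag/attribute value parsing is disabled.
  const roundTripped = await parseXML(xml);
  console.log(roundTripped.Name, roundTripped.Properties.$.encrypted);
}

demo().catch(console.error);
```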
+Object.defineProperty(exports, "__esModule", ({ value: true })); +const log_js_1 = __nccwpck_require__(18898); +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log: log_js_1.log, +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend, + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +exports["default"] = debugObj; +//# sourceMappingURL=debug.js.map /***/ }), -/***/ 73837: -/***/ ((module) => { +/***/ 89497: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("util"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AzureLogger = void 0; +exports.setLogLevel = setLogLevel; +exports.getLogLevel = getLogLevel; +exports.createClientLogger = createClientLogger; +const tslib_1 = __nccwpck_require__(4351); +const debug_js_1 = tslib_1.__importDefault(__nccwpck_require__(80162)); +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. 
+ */ +exports.AzureLogger = (0, debug_js_1.default)("azure"); +exports.AzureLogger.log = (...args) => { + debug_js_1.default.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug_js_1.default.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100, +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +function createClientLogger(namespace) { + const clientRootLogger = exports.AzureLogger.extend(namespace); + patchLogMethod(exports.AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose"), + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level, + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug_js_1.default.disable(); + debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map /***/ }), -/***/ 29830: -/***/ ((module) => { +/***/ 18898: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("util/types"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
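The logger module filters namespaced debug channels by severity: `setLogLevel` enables every registered logger at or above the chosen level, and overriding `AzureLogger.log` redirects output (stderr by default). A short usage sketch, assuming the module is consumed as the `@azure/logger` package it corresponds to; the `setup-node-demo` namespace is made up:

```ts
import { AzureLogger, createClientLogger, setLogLevel } from "@azure/logger";

// Redirect all SDK log lines to stdout instead of the default stderr writer.
AzureLogger.log = (...args: unknown[]) => {
  console.log(...args);
};

// "info" enables the error, warning and info channels; verbose stays silent.
setLogLevel("info");

const logger = createClientLogger("setup-node-demo");
logger.info("restoring dependency cache");       // printed
logger.verbose("cache key computation details"); // suppressed at the "info" level
```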
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.log = log; +const tslib_1 = __nccwpck_require__(4351); +const node_os_1 = __nccwpck_require__(70612); +const node_util_1 = tslib_1.__importDefault(__nccwpck_require__(47261)); +const process = tslib_1.__importStar(__nccwpck_require__(97742)); +function log(message, ...args) { + process.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); +} +//# sourceMappingURL=log.js.map /***/ }), -/***/ 71267: -/***/ ((module) => { +/***/ 58412: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -module.exports = require("worker_threads"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +exports.AbortError = AbortError; +//# sourceMappingURL=AbortError.js.map /***/ }), -/***/ 59796: -/***/ ((module) => { +/***/ 1753: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -module.exports = require("zlib"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AbortError = void 0; +var AbortError_js_1 = __nccwpck_require__(58412); +Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); +//# sourceMappingURL=index.js.map /***/ }), diff --git a/package-lock.json b/package-lock.json index e766bbce7..b78ee5aa2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "setup-node", - "version": "4.0.0", + "version": "4.0.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "setup-node", - "version": "4.0.0", + "version": "4.0.1", "license": "MIT", "dependencies": { - "@actions/cache": "npm:github-actions.warp-cache@1.1.16", + "@actions/cache": "npm:github-actions.warp-cache@1.4.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.0", "@actions/github": "^5.1.1", @@ -50,25 +50,40 @@ }, "node_modules/@actions/cache": { "name": "github-actions.warp-cache", - "version": "1.1.16", - "resolved": "https://registry.npmjs.org/github-actions.warp-cache/-/github-actions.warp-cache-1.1.16.tgz", - "integrity": "sha512-H7aTU3RPLbh74UiKHuVH2G/EEosdSQi+BFvotQLY3J2t7axbI9V3Dvf1YXZu/JtAb9Fl4UXRlRCTSHXtgQh1yA==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/github-actions.warp-cache/-/github-actions.warp-cache-1.4.0.tgz", + "integrity": "sha512-hmN4/03zHOYxJjXID46Yy8nK4p8jMf+rWdCYlFPRzoo4FgDG50RbLZjZZeGs5I20sTHe/Al3MOgqXEvfKjfCQA==", + "license": "MIT", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", + "@actions/github": "^6.0.0", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.1.1", "@actions/io": "^1.0.1", "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", - "@azure/storage-blob": "^12.13.0", + "@azure/storage-blob": "^12.25.0", 
"@google-cloud/storage": "^7.9.0", + "@octokit/webhooks-definitions": "^3.67.3", "axios": "^1.6.2", "google-auth-library": "^9.7.0", "semver": "^6.3.1", "uuid": "^3.3.3" } }, + "node_modules/@actions/cache/node_modules/@actions/github": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", + "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", + "license": "MIT", + "dependencies": { + "@actions/http-client": "^2.2.0", + "@octokit/core": "^5.0.1", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/plugin-rest-endpoint-methods": "^10.0.0" + } + }, "node_modules/@actions/cache/node_modules/@actions/glob": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", @@ -78,6 +93,164 @@ "minimatch": "^3.0.4" } }, + "node_modules/@actions/cache/node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/core": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.0.tgz", + "integrity": "sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg==", + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/endpoint": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz", + "integrity": "sha512-ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/graphql": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.0.tgz", + "integrity": "sha512-r+oZUH7aMFui1ypZnAvZmn0KSqAUgE1/tUXIWaqUCa1758ts/Jio84GZuzsvUkme98kv0WFY8//n0J1Z+vsIsQ==", + "license": "MIT", + "dependencies": { + "@octokit/request": "^8.3.0", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==", + "license": "MIT" + }, + "node_modules/@actions/cache/node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", + "integrity": "sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + 
"node_modules/@actions/cache/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@actions/cache/node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", + "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@actions/cache/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/request": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz", + "integrity": "sha512-9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/request-error": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz", + "integrity": "sha512-GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@actions/cache/node_modules/@octokit/types": { + "version": "13.6.1", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", + "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, "node_modules/@actions/cache/node_modules/semver": { "version": "6.3.1", "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -209,121 +382,230 @@ } }, "node_modules/@azure/core-auth": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.5.0.tgz", - "integrity": "sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.9.0.tgz", + "integrity": "sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw==", + "license": "MIT", "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-util": "^1.1.0", - "tslib": "^2.2.0" + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.11.0", + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" } }, - "node_modules/@azure/core-http": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.3.tgz", - "integrity": "sha512-QMib3wXotJMFhHgmJBPUF9YsyErw34H0XDFQd9CauH7TPB+RGcyl9Ayy7iURtJB04ngXhE6YwrQsWDXlSLrilg==", + "node_modules/@azure/core-auth/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.9.2.tgz", + "integrity": "sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==", + "license": "MIT", "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-auth": "^1.3.0", - "@azure/core-tracing": "1.0.0-preview.13", - "@azure/core-util": "^1.1.1", + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.6.1", "@azure/logger": "^1.0.0", - "@types/node-fetch": "^2.5.0", - "@types/tunnel": "^0.0.3", - "form-data": "^4.0.0", - "node-fetch": "^2.6.7", - "process": "^0.11.10", - "tslib": "^2.2.0", - "tunnel": "^0.0.6", - "uuid": "^8.3.0", - "xml2js": "^0.5.0" + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" } }, - "node_modules/@azure/core-http/node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "node_modules/@azure/core-client/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" + "tslib": "^2.6.2" }, "engines": { - "node": ">= 6" + "node": ">=18.0.0" } }, - "node_modules/@azure/core-http/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" + 
"node_modules/@azure/core-http-compat": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.1.2.tgz", + "integrity": "sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-client": "^1.3.0", + "@azure/core-rest-pipeline": "^1.3.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-http-compat/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@azure/core-lro": { - "version": "2.5.4", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.5.4.tgz", - "integrity": "sha512-3GJiMVH7/10bulzOKGrrLeG/uCBH/9VtxqaMcB9lIqAeamI/xYQSHJL/KcsLDuH+yTjYpro/u6D/MuRe4dN70Q==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "license": "MIT", "dependencies": { - "@azure/abort-controller": "^1.0.0", + "@azure/abort-controller": "^2.0.0", "@azure/core-util": "^1.2.0", "@azure/logger": "^1.0.0", - "tslib": "^2.2.0" + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@azure/core-paging": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.5.0.tgz", - "integrity": "sha512-zqWdVIt+2Z+3wqxEOGzR5hXFZ8MGKK52x4vFLw8n58pR6ZfKRx3EXYTxTaYxYHc/PexPUTyimcTWFJbji9Z6Iw==", + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "license": "MIT", "dependencies": { - "tslib": "^2.2.0" + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.18.0.tgz", + "integrity": "sha512-QSoGUp4Eq/gohEFNJaUOwTN7BCc2nHTjjbm75JT0aD7W65PWM1H/tItz0GsABn22uaKyGxiMhWQLt2r+FGU89Q==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.8.0", + "@azure/core-tracing": "^1.0.1", + "@azure/core-util": "^1.11.0", + "@azure/logger": "^1.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": 
"sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline/node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" } }, "node_modules/@azure/core-tracing": { - "version": "1.0.0-preview.13", - "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", - "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz", + "integrity": "sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==", + "license": "MIT", "dependencies": { - "@opentelemetry/api": "^1.0.1", - "tslib": "^2.2.0" + "tslib": "^2.6.2" }, "engines": { - "node": ">=12.0.0" + "node": ">=18.0.0" } }, "node_modules/@azure/core-util": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.5.0.tgz", - "integrity": "sha512-GZBpVFDtQ/15hW1OgBcRdT4Bl7AEpcEZqLfbAvOtm1CQUncKWiYapFHVD588hmlV27NbOOtSm3cnLF3lvoHi4g==", + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.11.0.tgz", + "integrity": "sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g==", + "license": "MIT", "dependencies": { - "@azure/abort-controller": "^1.0.0", - "tslib": "^2.2.0" + "@azure/abort-controller": "^2.0.0", + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-util/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-xml": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.4.tgz", + "integrity": "sha512-J4FYAqakGXcbfeZjwjMzjNcpcH4E+JtEBv+xcV1yL0Ydn/6wbQfeFKTCHh9wttAi0lmajHw7yBbHPRG+YHckZQ==", + "license": "MIT", + "dependencies": { + "fast-xml-parser": "^4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@azure/logger": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.4.tgz", - "integrity": "sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.4.tgz", + "integrity": "sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==", + "license": "MIT", "dependencies": { - "tslib": "^2.2.0" + "tslib": "^2.6.2" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" } }, "node_modules/@azure/ms-rest-js": { @@ -355,21 +637,39 @@ } }, "node_modules/@azure/storage-blob": { - "version": "12.16.0", 
- "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.16.0.tgz", - "integrity": "sha512-jz33rUSUGUB65FgYrTRgRDjG6hdPHwfvHe+g/UrwVG8MsyLqSxg9TaW7Yuhjxu1v1OZ5xam2NU6+IpCN0xJO8Q==", + "version": "12.26.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.26.0.tgz", + "integrity": "sha512-SriLPKezypIsiZ+TtlFfE46uuBIap2HeaQVS78e1P7rz5OSbq0rsd52WE1mC5f7vAeLiXqv7I7oRhL3WFZEw3Q==", + "license": "MIT", "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-http": "^3.0.0", + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.4.0", + "@azure/core-client": "^1.6.2", + "@azure/core-http-compat": "^2.0.0", "@azure/core-lro": "^2.2.0", "@azure/core-paging": "^1.1.1", - "@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-rest-pipeline": "^1.10.1", + "@azure/core-tracing": "^1.1.2", + "@azure/core-util": "^1.6.1", + "@azure/core-xml": "^1.4.3", "@azure/logger": "^1.0.0", "events": "^3.0.0", "tslib": "^2.2.0" }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@azure/storage-blob/node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@babel/code-frame": { @@ -1749,13 +2049,12 @@ "@octokit/openapi-types": "^12.11.0" } }, - "node_modules/@opentelemetry/api": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.6.0.tgz", - "integrity": "sha512-OWlrQAnWn9577PhVgqjUvMr1pg57Bc4jv0iL4w0PRuOSRvq67rvHW9Ie/dZVMvCzhSCB+UxhcY/PmCmFj33Q+g==", - "engines": { - "node": ">=8.0.0" - } + "node_modules/@octokit/webhooks-definitions": { + "version": "3.68.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-definitions/-/webhooks-definitions-3.68.1.tgz", + "integrity": "sha512-wa8koFift24mUsMarWP/wfl9kIwqL5TK9smsCRIyJYfs9iYQEoJsQjcmhyKCmevPA8Ja/K1ZTE4W8ABA0yMM8g==", + "deprecated": "Use @octokit/webhooks-types, @octokit/webhooks-schemas, or @octokit/webhooks-examples instead. 
See https://github.com/octokit/webhooks/issues/447", + "license": "MIT" }, "node_modules/@sinclair/typebox": { "version": "0.27.8", @@ -1892,28 +2191,6 @@ "undici-types": "~5.26.4" } }, - "node_modules/@types/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-lX17GZVpJ/fuCjguZ5b3TjEbSENxmEk1B2z02yoXSK9WMEWRivhdSY73wWMn6bpcCDAOh6qAdktpKHIlkDk2lg==", - "dependencies": { - "@types/node": "*", - "form-data": "^4.0.0" - } - }, - "node_modules/@types/node-fetch/node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/@types/request": { "version": "2.48.12", "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", @@ -1942,14 +2219,6 @@ "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==" }, - "node_modules/@types/tunnel": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", - "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/yargs": { "version": "17.0.29", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.29.tgz", @@ -2513,12 +2782,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -3274,6 +3543,7 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "license": "MIT", "engines": { "node": ">=0.8.x" } @@ -3378,9 +3648,9 @@ "dev": true }, "node_modules/fast-xml-parser": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.0.tgz", - "integrity": "sha512-kLY3jFlwIYwBNDojclKsNAC12sfD6NwW74QB2CoNGPvtVxjliYehVunB3HYyNi+n4Tt1dAcgwYvmKF/Z18flqg==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz", + "integrity": "sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==", "funding": [ { "type": "github", @@ -3391,6 +3661,7 @@ "url": "https://paypal.me/naturalintelligence" } ], + "license": "MIT", "dependencies": { "strnum": "^1.0.5" }, @@ -3429,9 +3700,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + 
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -4807,12 +5078,12 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -5229,14 +5500,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", @@ -5945,9 +6208,9 @@ } }, "node_modules/undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "dependencies": { "@fastify/busboy": "^2.0.0" }, diff --git a/package.json b/package.json index 962c37cb7..616b04cd0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "setup-node", - "version": "4.0.0", + "version": "4.0.1", "private": true, "description": "setup node action", "main": "lib/setup-node.js", @@ -25,7 +25,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "npm:github-actions.warp-cache@1.1.16", + "@actions/cache": "npm:github-actions.warp-cache@1.4.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.0", "@actions/github": "^5.1.1", diff --git a/src/cache-restore.ts b/src/cache-restore.ts index 3b230970e..af12ad83c 100644 --- a/src/cache-restore.ts +++ b/src/cache-restore.ts @@ -3,6 +3,7 @@ import * as core from '@actions/core'; import * as glob from '@actions/glob'; import path from 'path'; import fs from 'fs'; +import os from 'os'; import {State} from './constants'; import { @@ -21,6 +22,7 @@ export const restoreCache = async ( throw new Error(`Caching for '${packageManager}' is not supported`); } const platform = process.env.RUNNER_OS; + const arch = os.arch(); const cachePaths = await getCacheDirectories( packageManagerInfo, @@ -38,7 +40,7 @@ export const restoreCache = async ( ); } - const keyPrefix = `node-cache-${platform}-${packageManager}`; + const keyPrefix = `node-cache-${platform}-${arch}-${packageManager}`; const primaryKey = `${keyPrefix}-${fileHash}`; core.debug(`primary key is ${primaryKey}`); diff --git a/src/cache-utils.ts b/src/cache-utils.ts index 7066d7333..89841bc10 100644 --- a/src/cache-utils.ts +++ b/src/cache-utils.ts @@ -295,7 +295,13 @@ export function isGhes(): boolean { const ghUrl = new URL( process.env['GITHUB_SERVER_URL'] || 
'https://github.com' ); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; + + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === 'GITHUB.COM'; + const isGitHubEnterpriseCloudHost = hostname.endsWith('.GHE.COM'); + const isLocalHost = hostname.endsWith('.LOCALHOST'); + + return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; } export function isCacheFeatureAvailable(): boolean { diff --git a/src/distributions/base-distribution.ts b/src/distributions/base-distribution.ts index cf5bb5449..70b4b5724 100644 --- a/src/distributions/base-distribution.ts +++ b/src/distributions/base-distribution.ts @@ -150,7 +150,7 @@ export default abstract class BaseDistribution { throw err; } - const toolPath = await this.extractArchive(downloadPath, info); + const toolPath = await this.extractArchive(downloadPath, info, true); core.info('Done'); return toolPath; @@ -210,7 +210,8 @@ export default abstract class BaseDistribution { protected async extractArchive( downloadPath: string, - info: INodeVersionInfo | null + info: INodeVersionInfo | null, + isOfficialArchive?: boolean ) { // // Extract @@ -225,7 +226,7 @@ export default abstract class BaseDistribution { // on Windows runners without PowerShell Core. // // For default PowerShell Windows it should contain extension type to unpack it. - if (extension === '.zip') { + if (extension === '.zip' && isOfficialArchive) { const renamedArchive = `${downloadPath}.zip`; fs.renameSync(downloadPath, renamedArchive); extPath = await tc.extractZip(renamedArchive); diff --git a/src/distributions/official_builds/official_builds.ts b/src/distributions/official_builds/official_builds.ts index 4e368b001..e56eaf812 100644 --- a/src/distributions/official_builds/official_builds.ts +++ b/src/distributions/official_builds/official_builds.ts @@ -88,7 +88,11 @@ export default class OfficialBuilds extends BaseDistribution { ); if (downloadPath) { - toolPath = await this.extractArchive(downloadPath, versionInfo); + toolPath = await this.extractArchive( + downloadPath, + versionInfo, + false + ); } } else { core.info(